diff --git a/.ci/azure-pipelines.yml b/.ci/azure-pipelines.yml deleted file mode 100644 index f67fe0032c0..00000000000 --- a/.ci/azure-pipelines.yml +++ /dev/null @@ -1,220 +0,0 @@ -# azure pipelines build and test pymapdl - -variables: - ALLOW_PLOTTING: true - package_name: ansys-dpf-core - SHELLOPTS: 'errexit:pipefail' - -trigger: - branches: - include: - - '*' - exclude: - - gh-pages - tags: - include: - - '*' - -pr: - branches: - include: - - '*' - exclude: - - '*no-ci*' - -jobs: -- job: Windows - variables: - python.version: '3.8' - DISPLAY: ':99.0' - PYANSYS_OFF_SCREEN: True - DPF_PORT: 32772 - pool: - vmImage: 'windows-2019' - - steps: - - template: templates\prepare-environment-windows.yml - - - task: PublishBuildArtifacts@1 - displayName: 'WHEEL: publish artifacts' - inputs: - PathtoPublish: '$(System.DefaultWorkingDirectory)\dist' - ArtifactName: 'ansys_dpf_core_wheel' - enabled: true - - - script: | - pip install -r requirements_test.txt - displayName: Install Test Environment - - - script: | - set THISDIR=$(System.DefaultWorkingDirectory) - cd tests - set AWP_ROOT212=%THISDIR%\server\v212 - pytest -v --junitxml=junit/test-results.xml --cov ansys.dpf.core --cov-report=xml --reruns 3 - - displayName: Test Core API - timeoutInMinutes: 10 - - - task: PublishTestResults@2 - inputs: - testResultsFormat: 'JUnit' - testResultsFiles: 'tests/junit/test-results.xml' - testRunTitle: 'windowsTests' - publishRunAttachments: true - condition: always() - - - script: | - set THISDIR=$(System.DefaultWorkingDirectory) - cd $(System.DefaultWorkingDirectory) - set AWP_ROOT212=%THISDIR%\server\v212 - pytest --doctest-modules --junitxml=junit/test-doctests-results.xml ansys\dpf\core - condition: always() - displayName: Test API Docstrings - timeoutInMinutes: 5 - - - task: PublishTestResults@2 - inputs: - testResultsFormat: 'JUnit' - testResultsFiles: 'junit/test-doctests-results.xml' - testRunTitle: 'docTestsTests' - publishRunAttachments: true - condition: always() - - - 
script: | - set THISDIR=$(System.DefaultWorkingDirectory) - set AWP_ROOT212=%THISDIR%\server\v212 - python .ci/run_examples.py - displayName: 'Run example scripts' - timeoutInMinutes: 5 - - - script: | - pip install twine - python setup.py sdist - twine upload --skip-existing dist/* - displayName: 'Upload to PyPi' - condition: contains(variables['Build.SourceBranch'], 'refs/tags/') - env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: $(PYPI_TOKEN) - TWINE_REPOSITORY_URL: "https://upload.pypi.org/legacy/" - - - - script: | - type $(System.DefaultWorkingDirectory)\server\v212\aisol\bin\winx64\log.txt - displayName: 'Show DPF Server Logs' - condition: always() - - - template: templates\kill-servers-windows.yml - - -- job: Linux - variables: - python.version: '3.7' # due to VTK 8.1.2 requirement for docbuild - DISPLAY: ':99.0' - PYANSYS_OFF_SCREEN: True - DPF_PORT: 50055 - TEMP: $(System.DefaultWorkingDirectory)/temp - AWP_ROOT212: $(System.DefaultWorkingDirectory)/server/v212 - - pool: - vmImage: 'ubuntu-20.04' - steps: - - template: templates\prepare-environment-linux.yml - - - script: | - pip install -r requirements_test.txt - pip install pytest-azurepipelines - export AWP_ROOT212=${SYSTEM_DEFAULTWORKINGDIRECTORY}/server/v212 - cd tests - export DPF_IP=$(hostname -i) - xvfb-run pytest -v --junitxml=junit/test-results.xml --cov ansys.dpf.core --cov-report=xml --reruns 3 - export PATH=`pwd` - echo ${PATH} - displayName: Test Core API - - - task: PublishTestResults@2 - inputs: - testResultsFormat: 'JUnit' - testResultsFiles: 'junit/test-results.xml' - testRunTitle: 'linuxTests' - publishRunAttachments: true - searchFolder: 'tests/' - condition: always() - - - - script : | - echo $0 - if pgrep -x "Ans.Dpf.Grpc" > /dev/null - then - pkill -f Ans.Dpf.Grpc.exe - fi - displayName: 'Kill all servers' - condition: always() - continueOnError: true - -- job: DocumentationLinux - variables: - python.version: '3.7' # due to VTK 8.1.2 requirement for docbuild - PYANSYS_OFF_SCREEN: 
True - DPF_PORT: 50055 - TEMP: $(System.DefaultWorkingDirectory)/temp - AWP_ROOT212: $(System.DefaultWorkingDirectory)/server/v212 - GH_DOC_BRANCH: 'gh-pages' - - pool: - vmImage: 'ubuntu-20.04' - steps: - - template: templates\prepare-environment-linux.yml - - - script: | - pip install -r requirements_docs.txt - displayName: Install documentation packages for Python - - - script: | - sphinx-apidoc -o docs/source/api ansys ansys/dpf/core/aeneid.py -f --implicit-namespaces --separate --no-headings - xvfb-run make -C docs html SPHINXOPTS="-w build_errors.txt -N" - displayName: Build Documentation - - - task: ArchiveFiles@2 - inputs: - rootFolderOrFile: '$(System.DefaultWorkingDirectory)/docs/build' - includeRootFolder: false - archiveType: 'zip' - archiveFile: '$(System.DefaultWorkingDirectory)/docs/archive/doc-ansys-dpf-core.zip' - replaceExistingArchive: true - displayName: 'DOCUMENTATION: zip artifacts' - - - task: PublishBuildArtifacts@1 - displayName: 'DOCUMENTATION: publish artifacts' - inputs: - PathtoPublish: '$(System.DefaultWorkingDirectory)/docs/archive' - ArtifactName: doc-ansys-dpf-core - enabled: true - - - powershell: | - git init - git checkout -b $(GH_DOC_BRANCH) - git config --global user.name "pyansys-ci-bot" - git config --global user.email "$(GH_EMAIL)" - New-Item -ItemType file .nojekyll - git add . 
- git commit -m "Documentation generated by $(Build.DefinitionName)" - displayName: "Init git and add docs" - workingDirectory: docs/build/html - - - script: | - git remote add origin https://$(GH_PAT)@github.com/pyansys/DPF-Core-docs - git push -u origin $(GH_DOC_BRANCH) --force - displayName: "Publish GitHub Pages merge commit" - workingDirectory: docs/build/html - condition: contains(variables['Build.SourceBranch'], 'refs/tags/') - - - script : | - echo $0 - if pgrep -x "Ans.Dpf.Grpc" > /dev/null - then - pkill -f Ans.Dpf.Grpc.exe - fi - displayName: 'Kill all servers' - condition: always() - continueOnError: true diff --git a/.ci/build_doc.bat b/.ci/build_doc.bat index 250ad27ff8f..3701de0bcdf 100644 --- a/.ci/build_doc.bat +++ b/.ci/build_doc.bat @@ -1,6 +1,5 @@ -ECHO %AWP_ROOT212% set SPHINX_APIDOC_OPTIONS=inherited-members -call sphinx-apidoc -o ../docs/source/api ../ansys ../ansys/dpf/core/aeneid.py -f --implicit-namespaces --separate --no-headings +call sphinx-apidoc -o ../docs/source/api ../ansys -f --implicit-namespaces --separate --no-headings pushd . 
cd ../docs/ call make clean diff --git a/.ci/edit_ansys_version.py b/.ci/edit_ansys_version.py new file mode 100644 index 00000000000..f54c480b777 --- /dev/null +++ b/.ci/edit_ansys_version.py @@ -0,0 +1,19 @@ +import sys +import pkgutil +import os + +if __name__ == "__main__": + directory = os.path.dirname(pkgutil.get_loader("ansys.dpf.core").path) + file_path = os.path.join(directory, "_version.py") + for i, arg in enumerate(sys.argv): + if arg == "--version": + print(sys.argv[i+1]) + version = sys.argv[i+1] + file = open(file_path, 'r') + lines = file.readlines() + for i, line in enumerate(lines): + if "__ansys_version__" in line: + lines[i] = f'__ansys_version__ = "{version}"\n' + file.close() + with open(file_path, 'w') as file: + file.writelines(lines) diff --git a/.ci/templates/kill-servers-windows.yml b/.ci/templates/kill-servers-windows.yml deleted file mode 100644 index 53ef715bf09..00000000000 --- a/.ci/templates/kill-servers-windows.yml +++ /dev/null @@ -1,8 +0,0 @@ -steps: - - script : | - tasklist /FI "IMAGENAME eq Ans.Dpf.Grpc.exe" 2>NUL | find /I /N "Ans.Dpf.Grpc.exe">NUL - ECHO %ERRORLEVEL% - if "%ERRORLEVEL%"=="0"(taskkill /f /im Ans.Dpf.Grpc.exe) - displayName: 'Kill all servers' - condition: always() - continueOnError: true \ No newline at end of file diff --git a/.ci/templates/prepare-environment-linux.yml b/.ci/templates/prepare-environment-linux.yml deleted file mode 100644 index 59c6c1b749d..00000000000 --- a/.ci/templates/prepare-environment-linux.yml +++ /dev/null @@ -1,64 +0,0 @@ -steps: - - task: UsePythonVersion@0 - inputs: - versionSpec: "$(python.version)" - displayName: "Use Python $(python.version)" - - - task: PipAuthenticate@1 - inputs: - artifactFeeds: "pyansys" - onlyAddExtraIndex: true - - - script: | - sudo apt update - sudo apt install zip pandoc libgl1-mesa-glx xvfb - displayName: Install OS packages - - - script: | - pip install -r .ci/requirements_test_xvfb.txt - xvfb-run python .ci/display_test.py - displayName: Test 
virtual framebuffer - - - script: | - pip install -r requirements_build.txt - python setup.py bdist_wheel - export WHEELNAME=`ls dist/*.whl` - echo ${WHEELNAME} - pip install ${WHEELNAME} - cd tests - xvfb-run python -c "from ansys.dpf import core; print(core.Report())" - displayName: Install ansys-dpf-core - - - task: UniversalPackages@0 - inputs: - command: "download" - downloadDirectory: "$(System.DefaultWorkingDirectory)" - feedsToUse: "internal" - vstsFeed: "705e121a-9631-49f5-8aaf-c7142856f923" - vstsFeedPackage: "dpf-linux" #TODO: update hash of packages - vstsPackageVersion: "21.2.5" - displayName: Download DPF linux package - - - script: | - echo $0 - if pgrep -x "Ans.Dpf.Grpc" > /dev/null - then - pkill -f Ans.Dpf.Grpc.exe - fi - displayName: "Kill all servers" - condition: always() - continueOnError: true - - - script: | - env - displayName: Display env - - - script: | - cd ${AWP_ROOT212}/aisol/bin/linx64 - pwd - chmod 755 Ans.Dpf.Grpc.sh - chmod 755 Ans.Dpf.Grpc.exe - ./Ans.Dpf.Grpc.sh --port 50054 & > log.txt - export DPF_IP=$(hostname -i) - python -c "from ansys.dpf import core; core.connect_to_server(ip= '${DPF_IP}', port=50054); print('Python Connected')" - displayName: Start DPF Server diff --git a/.ci/templates/prepare-environment-windows.yml b/.ci/templates/prepare-environment-windows.yml deleted file mode 100644 index b9ac78e57d2..00000000000 --- a/.ci/templates/prepare-environment-windows.yml +++ /dev/null @@ -1,53 +0,0 @@ -steps: - - powershell: | - Set-StrictMode -Version Latest - $ErrorActionPreference = "Stop" - $PSDefaultParameterValues['*:ErrorAction']='Stop' - git clone --depth 1 git://github.com/pyvista/gl-ci-helpers.git - powershell gl-ci-helpers/appveyor/install_opengl.ps1 - displayName: 'Install OpenGL' - - - powershell: | - .ci/setup_headless_display.sh - pip install -r .ci/requirements_test_xvfb.txt - python .ci/display_test.py - displayName: Install test offscreen rendering - - - task: UsePythonVersion@0 - inputs: - versionSpec: 
$(python.version) - addToPath: true - - - task: PipAuthenticate@1 - inputs: - artifactFeeds: 'pyansys' - onlyAddExtraIndex: true - - - script: | - pip install -r requirements_build.txt - python setup.py bdist_wheel - FOR /F %%a in ('dir /s/b dist\*.whl') do SET WHEELPATH=%%a - ECHO %WHEELPATH% - pip install %WHEELPATH% - cd tests - python -c "from ansys.dpf import core; print(core.Report(gpu=False))" - - displayName: Install ansys-dpf-core - - - task: UniversalPackages@0 - inputs: - command: 'download' - downloadDirectory: '$(System.DefaultWorkingDirectory)' - feedsToUse: 'internal' - vstsFeed: '705e121a-9631-49f5-8aaf-c7142856f923' - vstsFeedPackage: 'dpf-windows' - vstsPackageVersion: '21.2.3' - - - script: | - @echo on - dir $(System.DefaultWorkingDirectory)\server\v212\aisol\bin\winx64 - set THISDIR=$(System.DefaultWorkingDirectory) - cd %THISDIR%\server\v212\aisol\bin\winx64 - START /B Ans.Dpf.Grpc.bat --address 127.0.0.1 --port %DPF_PORT% > log.txt 2>&1 - python -c "from ansys.dpf import core; core.connect_to_server(port=$(DPF_PORT)); print('Python Connected')" - displayName: Start DPF Server \ No newline at end of file diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000000..7ae843b54b1 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,308 @@ +name: GitHub Actions + +on: + pull_request: + branches-ignore: + - '*no-ci*' + push: + tags: + - "*" + branches: + - master + - "release*" + +env: + PYANSYS_OFF_SCREEN: True + DPF_PORT: 32772 + +jobs: + test_windows: + name: Windows + runs-on: windows-2019 + + env: + ANSYS_VERSION: 221 + + steps: + - uses: actions/checkout@v2 + + - name: Setup Python + uses: actions/setup-python@v2.1.4 + with: + python-version: 3.8 + + - name: Clone dpf-standalone + run: | + git clone https://${{secrets.DPF_PIPELINE}}@github.com/ansys-dpf/dpf-standalone + + - name: Set AWP_ROOT$env:ANSYS_VERSION + run: echo 
"AWP_ROOT$env:ANSYS_VERSION=${{github.workspace}}\dpf-standalone/v$env:ANSYS_VERSION" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf-8 -Append + + - name: Set SERVER + run: echo "SERVER=$env:AWP_ROOT221" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf-8 -Append + + - name: Extract standalone zip + run: | + ls + cd dpf-standalone + 7z x v$env:ANSYS_VERSION.7z + ls v$env:ANSYS_VERSION + + - name: Install ansys-dpf-core + shell: cmd + run: | + pip install -r requirements_build.txt + python setup.py bdist_wheel + FOR /F %%a in ('dir /s/b dist\*.whl') do SET WHEELPATH=%%a + ECHO %WHEELPATH% + cd tests + pip install %WHEELPATH% + python -c "from ansys.dpf import core; print(core.Report(gpu=False))" + + - name: Start DPF Server + run: | + cd $env:SERVER\aisol\bin\winx64 + ls . + Start-Process -FilePath "./Ans.Dpf.Grpc.bat" -ArgumentList "--address 127.0.0.1 --port $env:DPF_PORT" -RedirectStandardOutput "log.txt" + python -c "from ansys.dpf import core; core.connect_to_server(port=$env:DPF_PORT); print('Python Connected')" + timeout-minutes: 1 + + - name: Kill all servers + shell: cmd + run: | + tasklist /FI "IMAGENAME eq Ans.Dpf.Grpc.exe" 2>NUL | find /I /N "Ans.Dpf.Grpc.exe">NUL + ECHO %ERRORLEVEL% + if "%ERRORLEVEL%"=="0"(taskkill /f /im Ans.Dpf.Grpc.exe) + continue-on-error: true + + - name: Show DPF Server Logs + run: Get-Content -Path $env:SERVER\aisol\bin\winx64\log.txt + continue-on-error: true + if: always() + + - name: WHEEL publish artifacts + uses: actions/upload-artifact@v2 + with: + name: ansys_dpf_core_wheel + path: ./dist/* + + - name: Install OpenGL + run: | + Set-StrictMode -Version Latest + $ErrorActionPreference = "Stop" + $PSDefaultParameterValues['*:ErrorAction']='Stop' + git clone --depth 1 git://github.com/pyvista/gl-ci-helpers.git + powershell gl-ci-helpers/appveyor/install_opengl.ps1 + + - name: Install test offscreen rendering + run: | + .ci/setup_headless_display.sh + pip install -r .ci/requirements_test_xvfb.txt + python 
.ci/display_test.py + + - name: Install Test Environment + run: | + pip install -r requirements_test.txt + if: always() + + - name: Test API Docstrings + run: | + pytest --doctest-modules --junitxml=junit/test-doctests-results.xml ansys/dpf/core + + - name: Kill all servers + shell: cmd + run: | + tasklist /FI "IMAGENAME eq Ans.Dpf.Grpc.exe" 2>NUL | find /I /N "Ans.Dpf.Grpc.exe">NUL + ECHO %ERRORLEVEL% + if "%ERRORLEVEL%"=="0"(taskkill /f /im Ans.Dpf.Grpc.exe) + continue-on-error: true + + - name: Publish Doc Test Results + uses: actions/upload-artifact@v2 + with: + name: ansys_dpf_core_doctest + path: junit/test-doctests-results.xml + if: always() + + - name: Test Core API + run: | + cd tests + New-Item -Path ".\..\" -Name "local_server_test" -ItemType "directory" + Copy-Item -Path ".\test_local_server.py",".\test_multi_server.py", ".\test_workflow.py" -Destination ".\..\local_server_test\" + Copy-Item -Path ".\conftest.py" -Destination ".\..\local_server_test\conftest.py" + Remove-Item -Path ".\test_local_server.py",".\test_multi_server.py", ".\test_workflow.py" + pytest --log-level=ERROR --junitxml=junit/test-results1.xml --reruns 2 . + + - name: Test Core API 2 + run: | + cd local_server_test + pytest --log-level=ERROR --junitxml=../tests/junit/test-results2.xml --reruns 2 . 
+ timeout-minutes: 10 + + - name: Kill all servers + shell: cmd + run: | + tasklist /FI "IMAGENAME eq Ans.Dpf.Grpc.exe" 2>NUL | find /I /N "Ans.Dpf.Grpc.exe">NUL + ECHO %ERRORLEVEL% + if "%ERRORLEVEL%"=="0"(taskkill /f /im Ans.Dpf.Grpc.exe) + continue-on-error: true + + - name: Publish Test Results + uses: actions/upload-artifact@v2 + with: + name: ansys_dpf_core_pytest + path: tests/junit/test-results*.xml + if: always() + + - name: 'Upload to PyPi' + if: contains(github.ref, 'refs/tags') + shell: cmd + run: | + pip install twine + python setup.py sdist + twine upload --skip-existing dist/* + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{secrets.PYPI_TOKEN}} + TWINE_REPOSITORY_URL: "https://upload.pypi.org/legacy/" + + build_doc: + name: Documentation + runs-on: windows-2019 + + env: + ANSYS_VERSION: 221 + + steps: + - uses: actions/checkout@v2 + + - name: Setup Python + uses: actions/setup-python@v2.1.4 + with: + python-version: 3.8 + + - name: Clone dpf-standalone + run: | + git clone https://${{secrets.DPF_PIPELINE}}@github.com/ansys-dpf/dpf-standalone + + - name: Set AWP_ROOT$env:ANSYS_VERSION + run: echo "AWP_ROOT$env:ANSYS_VERSION=${{github.workspace}}\dpf-standalone/v$env:ANSYS_VERSION" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf-8 -Append + + - name: Set SERVER + run: echo "SERVER=$env:AWP_ROOT221" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf-8 -Append + + - name: Extract standalone zip + run: | + ls + cd dpf-standalone + 7z x v$env:ANSYS_VERSION.7z + ls v$env:ANSYS_VERSION + + - name: Install ansys-dpf-core + shell: cmd + run: | + pip install -r requirements_build.txt + python setup.py bdist_wheel + FOR /F %%a in ('dir /s/b dist\*.whl') do SET WHEELPATH=%%a + ECHO %WHEELPATH% + cd tests + pip install %WHEELPATH% + python -c "from ansys.dpf import core; print(core.Report(gpu=False))" + + - name: Start DPF Server + run: | + cd $env:SERVER\aisol\bin\winx64 + ls . 
+ Start-Process -FilePath "./Ans.Dpf.Grpc.bat" -ArgumentList "--address 127.0.0.1 --port $env:DPF_PORT" -RedirectStandardOutput "log.txt" -NoNewWindow + python -c "from ansys.dpf import core; core.connect_to_server(port=$env:DPF_PORT); print('Python Connected')" + timeout-minutes: 1 + + - name: Kill all servers + shell: cmd + run: | + tasklist /FI "IMAGENAME eq Ans.Dpf.Grpc.exe" 2>NUL | find /I /N "Ans.Dpf.Grpc.exe">NUL + ECHO %ERRORLEVEL% + if "%ERRORLEVEL%"=="0"(taskkill /f /im Ans.Dpf.Grpc.exe) + continue-on-error: true + + - name: Show DPF Server Logs + run: Get-Content -Path $env:SERVER\aisol\bin\winx64\log.txt + continue-on-error: true + if: always() + + - name: Install OpenGL + run: | + Set-StrictMode -Version Latest + $ErrorActionPreference = "Stop" + $PSDefaultParameterValues['*:ErrorAction']='Stop' + git clone --depth 1 git://github.com/pyvista/gl-ci-helpers.git + powershell gl-ci-helpers/appveyor/install_opengl.ps1 + + - name: Install test offscreen rendering + run: | + .ci/setup_headless_display.sh + pip install -r .ci/requirements_test_xvfb.txt + python .ci/display_test.py + + - name: Install documentation packages for Python + run: | + pip install -r requirements_docs.txt + + - name: Build Documentation + shell: cmd + run: | + cd .ci + build_doc.bat > ..\docs\log.txt 2>&1 + timeout-minutes: 20 + + - name: DOCUMENTATION zip artifacts + run: | + 7z a -tzip ./docs/archive/doc-ansys-dpf-core.zip ./docs/build + if: always() + + - name: Kill all servers + shell: cmd + run: | + tasklist /FI "IMAGENAME eq Ans.Dpf.Grpc.exe" 2>NUL | find /I /N "Ans.Dpf.Grpc.exe">NUL + ECHO %ERRORLEVEL% + if "%ERRORLEVEL%"=="0"(taskkill /f /im Ans.Dpf.Grpc.exe) + continue-on-error: true + if: always() + + - name: Publish Documentation artifact + uses: actions/upload-artifact@v2 + with: + name: doc-ansys-dpf-core + path: ./docs/archive/doc-ansys-dpf-core.zip + if: always() + + - name: Publish Documentation log + uses: actions/upload-artifact@v2 + with: + name: 
doc-ansys-dpf-core-log + path: ./docs/*.txt + if: always() + + - name: Init git and add docs + if: contains(github.ref, 'refs/tags') + run: | + cd docs/build/html + git init + git checkout -b $(GH_DOC_BRANCH) + git config --global user.name "pyansys-ci-bot" + git config --global user.email "$(GH_EMAIL)" + New-Item -ItemType file .nojekyll + git add . + git commit -m "Documentation generated" + env: + GH_DOC_BRANCH: gh-pages + GH_EMAIL: pyansys.github.bot@ansys.com + + - name: Publish GitHub Pages merge commit + if: contains(github.ref, 'refs/tags') + run: | + cd docs/build/html + git remote add origin https://${{secrets.PYANSYS_CI_BOT_TOKEN}}@github.com/pyansys/DPF-Core-docs + git push -u origin $(GH_DOC_BRANCH) --force + env: + GH_DOC_BRANCH: gh-pages \ No newline at end of file diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml index f5342ad6080..37c1c3922a4 100644 --- a/.github/workflows/style.yml +++ b/.github/workflows/style.yml @@ -1,7 +1,16 @@ # check spelling, codestyle name: Style Check -on: [push, pull_request, workflow_dispatch] +on: + pull_request: + branches-ignore: + - '*no-ci*' + push: + tags: + - "*" + branches: + - master + - "release*" jobs: build: diff --git a/.gitignore b/.gitignore index bffa239e82d..e43abd59d11 100644 --- a/.gitignore +++ b/.gitignore @@ -22,7 +22,6 @@ __pycache__ # compiled documentation docs/build -docs/source/examples # pip files *.egg-info @@ -45,5 +44,8 @@ test-output.xml # downloaded files ansys/dpf/core/examples/_cache/ -# Virtual environment -venv/ \ No newline at end of file +*.orig +venv/* + +# Visual studio code settings +.vscode diff --git a/README.md b/README.md index 11412319c21..a7ae433a2c3 100644 --- a/README.md +++ b/README.md @@ -42,8 +42,8 @@ pip install ansys-dpf-core You can also clone and install this repository with: ``` -git clone https://github.com/pyansys/DPF-Core -cd DPF-Core +git clone https://github.com/pyansys/pydpf-core +cd pydpf-core pip install . 
--user ``` diff --git a/ansys/dpf/core/__init__.py b/ansys/dpf/core/__init__.py index d8d8d0450bd..22cb8f18359 100644 --- a/ansys/dpf/core/__init__.py +++ b/ansys/dpf/core/__init__.py @@ -61,6 +61,7 @@ ) from ansys.dpf.core import server from ansys.dpf.core import check_version +from ansys.dpf.core import path_utilities from ansys.dpf.core import settings # for matplotlib @@ -69,28 +70,19 @@ # Setup data directory USER_DATA_PATH = None -EXAMPLES_PATH = None -if os.environ.get("DPF_DOCKER", False): # pragma: no cover - # Running DPF within docker (likely for CI) - # path must be relative to DPF directory - # - # assumes the following docker mount: - # -v /tmp:/dpf/_cache - EXAMPLES_PATH = "/tmp" -else: - try: - import appdirs +LOCAL_DOWNLOADED_EXAMPLES_PATH = None +try: + import appdirs - USER_DATA_PATH = appdirs.user_data_dir("ansys-dpf-core") - if not os.path.exists(USER_DATA_PATH): # pragma: no cover - os.makedirs(USER_DATA_PATH) - - EXAMPLES_PATH = os.path.join(USER_DATA_PATH, "examples") - if not os.path.exists(EXAMPLES_PATH): # pragma: no cover - os.makedirs(EXAMPLES_PATH) - except: # pragma: no cover - pass + USER_DATA_PATH = appdirs.user_data_dir("ansys-dpf-core") + if not os.path.exists(USER_DATA_PATH): # pragma: no cover + os.makedirs(USER_DATA_PATH) + LOCAL_DOWNLOADED_EXAMPLES_PATH = os.path.join(USER_DATA_PATH, "examples") + if not os.path.exists(LOCAL_DOWNLOADED_EXAMPLES_PATH): # pragma: no cover + os.makedirs(LOCAL_DOWNLOADED_EXAMPLES_PATH) +except: # pragma: no cover + pass SERVER = None diff --git a/ansys/dpf/core/_version.py b/ansys/dpf/core/_version.py index 8dc8e72ccf0..f1c4feb1e7a 100644 --- a/ansys/dpf/core/_version.py +++ b/ansys/dpf/core/_version.py @@ -1,8 +1,8 @@ """Version for ansys-dpf-core""" # major, minor, patch -version_info = 0, 3, "dev0" +version_info = 0, 4, 'dev0' # Nice string for the version __version__ = ".".join(map(str, version_info)) -__ansys_version__ = "212" +__ansys_version__ = "221" min_server_version = "2.0" diff --git 
a/ansys/dpf/core/available_result.py b/ansys/dpf/core/available_result.py index 6ed7cbc9635..cee7b70f737 100644 --- a/ansys/dpf/core/available_result.py +++ b/ansys/dpf/core/available_result.py @@ -5,7 +5,7 @@ from warnings import warn from ansys.grpc.dpf import available_result_pb2, base_pb2 -from ansys.dpf.core.common import _remove_spaces +from ansys.dpf.core.common import _remove_spaces, _make_as_function_name class AvailableResult: @@ -58,10 +58,13 @@ def __str__(self): @property def name(self): """Result operator.""" - if self.operator_name in _result_properties: - return _result_properties[self.operator_name]["scripting_name"] + if hasattr(self._message, "properties") and "scripting_name" in self._message.properties: + name = self._message.properties["scripting_name"] + elif self.operator_name in _result_properties: + name = _result_properties[self.operator_name]["scripting_name"] else: - return _remove_spaces(self._message.physicsname) + name = _remove_spaces(self._message.physicsname) + return _make_as_function_name(name) @property def n_components(self): @@ -122,6 +125,8 @@ def sub_results(self): @property def native_location(self): """Native location of the result.""" + if hasattr(self._message, "properties") and "location" in self._message.properties: + return self._message.properties["location"] if self.operator_name in _result_properties: return _result_properties[self.operator_name]["location"] diff --git a/ansys/dpf/core/check_version.py b/ansys/dpf/core/check_version.py index ba22bab9ad3..d9621cc16c7 100644 --- a/ansys/dpf/core/check_version.py +++ b/ansys/dpf/core/check_version.py @@ -6,6 +6,7 @@ from ansys.dpf.core import errors as dpf_errors import sys +from functools import wraps def server_meet_version(required_version, server): @@ -169,6 +170,7 @@ def decorator(func): "version_requires decorator must be a string with a dot separator." 
) + @wraps(func) def wrapper(self, *args, **kwargs): """Call the original function""" server = self._server @@ -183,10 +185,10 @@ def wrapper(self, *args, **kwargs): if size != 0: max_size = 8.0e6 // sys.getsizeof(ids[0]) if size > max_size: - server.check_version(min_version) + server.check_version(min_version, " called from " + func.__name__) # default case, just check the compatibility else: - server.check_version(min_version) + server.check_version(min_version, " called from " + func.__name__) return func(self, *args, **kwargs) diff --git a/ansys/dpf/core/collection.py b/ansys/dpf/core/collection.py index 0dfb04d7e8b..d774ab2c563 100644 --- a/ansys/dpf/core/collection.py +++ b/ansys/dpf/core/collection.py @@ -4,6 +4,8 @@ Contains classes associated with the DPF collection. """ +import numpy as np +from typing import NamedTuple from ansys import dpf from ansys.grpc.dpf import collection_pb2, collection_pb2_grpc @@ -15,7 +17,7 @@ from ansys.dpf.core.time_freq_support import TimeFreqSupport from ansys.dpf.core.errors import protect_grpc from ansys.dpf.core import server -from ansys.dpf.core.scoping import Scoping +from ansys.dpf.core import scoping class Collection: @@ -39,7 +41,7 @@ class Collection: """ - def __init__(self, dpf_type, collection=None, server: server.DpfServer = None): + def __init__(self, dpf_type=None, collection=None, server: server.DpfServer=None): if server is None: server = dpf.core._global_server() @@ -59,9 +61,44 @@ def __init__(self, dpf_type, collection=None, server: server.DpfServer = None): elif hasattr(collection, "_message"): self._message = collection._message self._collection = collection # keep the base collection used for copy + else: self._message = collection + if self._type == None: + self._type = types(int(self._message.type) + 1) + + @staticmethod + def integral_collection(inpt, server: server.DpfServer = None): + """Creates a collection of integral type with a list. 
+ + The collection of integral is the equivalent of an array of + data sent server side. It can be used to efficiently stream + large data to the server. + + Parameters + ---------- + inpt : list[float], list[int], numpy.array + list to transfer server side + + Returns + ------- + Collection + + Notes + ----- + Used by default by the ``'Operator'`` and the``'Workflow'`` when a + list is connected or returned. + + """ + if all(isinstance(x, int) for x in inpt): + dpf_type = types.int + elif all(isinstance(x, float) for x in inpt): + dpf_type = types.double + out = Collection(dpf_type=dpf_type, server=server) + out._set_integral_entries(inpt) + return out + def set_labels(self, labels): """Set labels for scoping the collection. @@ -89,7 +126,8 @@ def add_label(self, label, default_value=None): Parameters ---------- label : str - Labels to scope the etnries to. For example, ``"time"``. + Labels to scope the entries to. For example, ``"time"``. + default_value : int, optional Default value for existing fields in the collection. The default is ``None``. @@ -162,6 +200,25 @@ def _get_entries(self, label_space_or_index): entries : list[Scoping], list[Field], list[MeshedRegion] Entries corresponding to the request. """ + entries = self._get_entries_tuple(label_space_or_index) + if isinstance(entries, list): + return [entry.entry for entry in entries] + return entries + + def _get_entries_tuple(self, label_space_or_index): + """Retrieve the entries at a requested label space or index. + + Parameters + ---------- + label_space_or_index : dict[str,int] + Label space or index. For example, + ``{"time": 1, "complex": 0}`` or the index of the field. + + Returns + ------- + entries : list[_CollectionEntry] + Entries corresponding to the request. 
+ """ request = collection_pb2.EntryRequest() request.collection.CopyFrom(self._message) @@ -174,21 +231,34 @@ def _get_entries(self, label_space_or_index): out = self._stub.GetEntries(request) list_out = [] for obj in out.entries: + label_space = {} + if obj.HasField("label_space"): + for key in obj.label_space.label_space: + label_space[key] = obj.label_space.label_space[key] if obj.HasField("dpf_type"): if self._type == types.scoping: unpacked_msg = scoping_pb2.Scoping() obj.dpf_type.Unpack(unpacked_msg) - list_out.append(Scoping(scoping=unpacked_msg, server=self._server)) + list_out.append( + _CollectionEntry( + label_space=label_space, + entry=Scoping(scoping=unpacked_msg, server=self._server))) elif self._type == types.field: unpacked_msg = field_pb2.Field() obj.dpf_type.Unpack(unpacked_msg) - list_out.append(Field(field=unpacked_msg, server=self._server)) + list_out.append( + _CollectionEntry( + label_space=label_space, + entry=Field(field=unpacked_msg, server=self._server))) elif self._type == types.meshed_region: unpacked_msg = meshed_region_pb2.MeshedRegion() obj.dpf_type.Unpack(unpacked_msg) list_out.append( - MeshedRegion(mesh=unpacked_msg, server=self._server) + _CollectionEntry( + label_space=label_space, + entry=MeshedRegion(mesh=unpacked_msg, server=self._server)) ) + if len(list_out) == 0: list_out = None return list_out @@ -230,16 +300,8 @@ def get_label_space(self, index): Scoping of the requested entry. For example, ``{"time": 1, "complex": 0}``. """ - request = collection_pb2.EntryRequest() - request.collection.CopyFrom(self._message) - request.index = index - out = self._stub.GetEntries(request).entries - out = out[0].label_space.label_space - dictOut = {} - for key in out: - dictOut[key] = out[key] - - return dictOut + entries = self._get_entries_tuple(index) + return entries[0].label_space def get_available_ids_for_label(self, label="time"): """Retrieve the IDs assigned to an input label. 
@@ -350,6 +412,37 @@ def _set_time_freq_support(self, time_freq_support): request.label = "time" self._stub.UpdateSupport(request) + def _set_integral_entries(self, input): + if self._type == types.int: + dtype = np.int32 + else: + dtype = np.float + + if isinstance(input, range): + input = np.array(list(input), dtype=dtype) + elif not isinstance(input, (np.ndarray, np.generic)): + input = np.array(input, dtype=dtype) + else: + input = np.array(list(input), dtype=dtype) + + metadata = [(u"size_bytes", f"{input.size * input.itemsize}")] + request = collection_pb2.UpdateAllDataRequest() + request.collection.CopyFrom(self._message) + + self._stub.UpdateAllData(scoping._data_chunk_yielder(request, input), metadata=metadata) + + def _get_integral_entries(self): + request = collection_pb2.GetAllDataRequest() + request.collection.CopyFrom(self._message) + if self._type == types.int: + data_type = u"int" + dtype = np.int32 + else: + data_type = u"double" + dtype = np.float + service = self._stub.GetAllData(request, metadata=[(u"float_or_double", data_type)]) + return scoping._data_get_chunk_(dtype, service) + def _connect(self): """Connect to the gRPC service.""" return collection_pb2_grpc.CollectionServiceStub(self._server.channel) @@ -370,7 +463,10 @@ def __str__(self): Description of the entity. 
""" request = base_pb2.DescribeRequest() - request.dpf_type_id = self._message.id + if isinstance(self._message.id, int): + request.dpf_type_id = self._message.id + else: + request.dpf_type_id = self._message.id.id return self._stub.Describe(request).description def __len__(self): @@ -387,3 +483,7 @@ def __del__(self): def __iter__(self): for i in range(len(self)): yield self[i] + +class _CollectionEntry(NamedTuple): + label_space: dict + entry: object diff --git a/ansys/dpf/core/common.py b/ansys/dpf/core/common.py index 82e1bc219ce..d60df322059 100644 --- a/ansys/dpf/core/common.py +++ b/ansys/dpf/core/common.py @@ -20,6 +20,20 @@ def _remove_spaces(name): return out +def _make_as_function_name(name): + out = name.lower() + out = out.replace(" ", "_").\ + replace("-", "_").\ + replace("/", "_").\ + replace(".", "_").\ + replace(":", "_").\ + replace(";", "_").\ + replace(",", "_").\ + replace("(", "").\ + replace(")", "") + return out + + def _snake_to_camel_case(name): return "".join(word.title() for word in name.split("_")) @@ -67,7 +81,6 @@ def __write_enum_doc__(enum, intro=None): ), ) - names = [(m.lower(), num - 1) for m, num in field_definition_pb2.ShellLayers.items()] shell_layers = Enum("shell_layers", names) shell_layers.__doc__ = __write_enum_doc__( @@ -127,6 +140,64 @@ class locations: time_freq_step = "TimeFreq_steps" +class elemental_properties: + """Contains strings to define elemental property fields. 
+ + Attributes + ---------- + element_shape = "elshape" + element shape property data is provided + + element_type = "eltype" + element type property data is provided + + connectivity = "connectivity" + connectivity property data is provided + + material = "mat" + material property data is provided + + element_properties = "elprops" + element properties data is provided + + apdl_element_type = "apdl_element_type" + apdl element type property data is provided + """ + element_shape = "elshape" + element_type = "eltype" + connectivity = "connectivity" + material = "mat" + element_properties = "elprops" + apdl_element_type = "apdl_element_type" + + _elemental_property_type_dict = { + element_type: "ELEMENT_TYPE", + element_shape: "ELEMENT_SHAPE", + material: "MATERIAL", + connectivity: "CONNECTIVITY", + } + + +class nodal_properties: + """Contains strings to define nodal property fields. + + Attributes + ---------- + coordinates = "coordinates" + coordinates data is provided + + nodal_connectivity = "reverse_connectivity" + nodal connectivity property data is provided + """ + coordinates = "coordinates" + nodal_connectivity = "reverse_connectivity" + + _nodal_property_type_dict = { + coordinates: "COORDINATES", + nodal_connectivity: "NODAL_CONNECTIVITY", + } + + class DefinitionLabels: """Contains Python definition labels.""" @@ -135,7 +206,6 @@ class DefinitionLabels: def _common_progress_bar(text, unit, tot_size=None): - if tot_size: widgets = [ progressbar.FormatLabel(f"{text}: %(value)d of %(max_value)d {unit} "), @@ -151,3 +221,8 @@ def _common_progress_bar(text, unit, tot_size=None): return progressbar.ProgressBar( widgets=widgets, max_value=progressbar.UnknownLength ) + + +def _common_percentage_progress_bar(text): + widgets = [progressbar.FormatLabel(f'{text}: %(value)d %%'), progressbar.Bar()] + return progressbar.ProgressBar(widgets=widgets, max_value=100) diff --git a/ansys/dpf/core/core.py b/ansys/dpf/core/core.py index a644d1bb4ae..d4ef8677dbf 100644 --- 
a/ansys/dpf/core/core.py +++ b/ansys/dpf/core/core.py @@ -7,6 +7,7 @@ import time import weakref import pathlib +import sys import grpc @@ -74,15 +75,15 @@ def upload_file_in_tmp_folder(file_path, new_file_name=None, server=None): Server with channel connected to the remote or local instance. When ``None``, attempts to use the the global server. - Notes - ----- - Print a progress bar - Returns ------- server_file_path : str path generated server side + Notes + ----- + Print a progress bar + Examples -------- >>> from ansys.dpf import core as dpf @@ -95,7 +96,7 @@ def upload_file_in_tmp_folder(file_path, new_file_name=None, server=None): def upload_files_in_folder( - to_server_folder_path, client_folder_path, specific_extension=None, server=None + to_server_folder_path, client_folder_path, specific_extension=None, server=None ): """Upload all the files from a folder of the client to the target server folder path. @@ -160,7 +161,7 @@ def download_file(server_file_path, to_client_file_path, server=None): def download_files_in_folder( - server_folder_path, to_client_folder_path, specific_extension=None, server=None + server_folder_path, to_client_folder_path, specific_extension=None, server=None ): """Download all the files from a folder of the server to the target client folder path @@ -180,14 +181,15 @@ def download_files_in_folder( Server with channel connected to the remote or local instance. When ``None``, attempts to use the the global server. 
- Notes - ----- - Print a progress bar - Returns ------- paths : list of str new file paths client side + + Notes + ----- + Print a progress bar + """ base = BaseService(server, load_operators=False) return base.download_files_in_folder( @@ -260,7 +262,10 @@ def _description(dpf_entity_message, server=None): ------- description : str """ - return BaseService(server, load_operators=False)._description(dpf_entity_message) + try: + return BaseService(server, load_operators=False)._description(dpf_entity_message) + except: + return "" @class_handling_cache @@ -312,7 +317,7 @@ def _connect(self, timeout=5): state = grpc.channel_ready_future(self._server().channel) tstart = time.time() while (time.time() - tstart) < timeout and not state._matured: - time.sleep(0.01) + time.sleep(0.005) if not state._matured: raise IOError( @@ -373,7 +378,7 @@ def load_library(self, filename, name="", symbol="LoadOperators"): # TODO: fix code generation upload posix import os - if os.name != "posix": + if self._server().os != 'posix' or (not self._server().os and os.name != 'posix'): local_dir = os.path.dirname(os.path.abspath(__file__)) LOCAL_PATH = os.path.join(local_dir, "operators") @@ -416,9 +421,15 @@ def _get_server_info(self): "server_port": response.port, "server_process_id": response.processId, "server_version": str(response.majorVersion) - + "." - + str(response.minorVersion), + + "." 
+ + str(response.minorVersion), } + if hasattr(response, "properties"): + for key in response.properties: + out[key] = response.properties[key] + else: + out["os"] = None + return out def _description(self, dpf_entity_message): @@ -436,7 +447,11 @@ def _description(self, dpf_entity_message): """ try: request = base_pb2.DescribeRequest() - request.dpf_type_id = dpf_entity_message.id + if isinstance(dpf_entity_message.id, int): + request.dpf_type_id = dpf_entity_message.id + else: + request.dpf_type_id = dpf_entity_message.id.id + return self._stub.Describe(request).description except: return "" @@ -456,34 +471,46 @@ def _get_separator(self, path): def download_file(self, server_file_path, to_client_file_path): """Download a file from the server to the target client file path - Notes - ----- - Print a progress bar - Parameters ---------- server_file_path : str - file path to dowload on the server side + file path to download on the server side to_client_file_path: str file path target where the file will be located client side + + Notes + ----- + Print a progress bar """ request = base_pb2.DownloadFileRequest() request.server_file_path = server_file_path chunks = self._stub.DownloadFile(request) - bar = _common_progress_bar("Downloading...", unit="KB") - bar.start() + bar = None + tot_size = sys.float_info.max + for i in range(0, len(chunks.initial_metadata())): + if chunks.initial_metadata()[i].key == u"size_tot": + tot_size = int(chunks.initial_metadata()[i].value) * 1E-3 + bar = _common_progress_bar("Downloading...", + unit="KB", + tot_size=tot_size) + if not bar: + bar = _common_progress_bar("Downloading...", unit="KB") + bar.start() i = 0 with open(to_client_file_path, "wb") as f: for chunk in chunks: f.write(chunk.data.data) i += len(chunk.data.data) * 1e-3 - bar.update(i) + try: + bar.update(min(i, tot_size)) + except: + pass bar.finish() @protect_grpc def download_files_in_folder( - self, server_folder_path, to_client_folder_path, specific_extension=None + 
self, server_folder_path, to_client_folder_path, specific_extension=None ): """Download all the files from a folder of the server to the target client folder path @@ -499,15 +526,15 @@ def download_files_in_folder( specific_extension (optional) : str copies only the files with the given extension - Notes - ----- - Print a progress bar - - Returns ------- paths : list of str new file paths client side + + Notes + ----- + Print a progress bar + """ request = base_pb2.DownloadFileRequest() request.server_file_path = server_folder_path @@ -530,8 +557,8 @@ def download_files_in_folder( if chunk.data.server_file_path != server_path: server_path = chunk.data.server_file_path if ( - specific_extension == None - or pathlib.Path(server_path).suffix == "." + specific_extension + specific_extension == None + or pathlib.Path(server_path).suffix == "." + specific_extension ): separator = self._get_separator(server_path) server_subpath = server_path.replace( @@ -573,7 +600,7 @@ def download_files_in_folder( @protect_grpc def upload_files_in_folder( - self, to_server_folder_path, client_folder_path, specific_extension=None + self, to_server_folder_path, client_folder_path, specific_extension=None ): """Upload all the files from a folder of the client to the target server folder path. 
@@ -618,24 +645,24 @@ def upload_files_in_folder( return server_paths def _upload_and_get_server_path( - self, - specific_extension, - f, - filename, - server_paths, - to_server_folder_path, - subdirectory=None, + self, + specific_extension, + f, + filename, + server_paths, + to_server_folder_path, + subdirectory=None, ): separator = self._get_separator(to_server_folder_path) if subdirectory is not None: to_server_file_path = ( - to_server_folder_path + separator + subdirectory + separator + filename + to_server_folder_path + separator + subdirectory + separator + filename ) else: to_server_file_path = to_server_folder_path + separator + filename if ((specific_extension is not None) and (f.endswith(specific_extension))) or ( - specific_extension is None + specific_extension is None ): server_path = self._stub.UploadFile( self.__file_chunk_yielder( @@ -657,14 +684,14 @@ def upload_file(self, file_path, to_server_file_path): to_server_file_path: str file path target where the file will be located server side - Notes - ----- - Print a progress bar - Returns ------- server_file_path : str path generated server side + + Notes + ----- + Print a progress bar """ if os.stat(file_path).st_size == 0: raise ValueError(file_path + " is empty") diff --git a/ansys/dpf/core/data_sources.py b/ansys/dpf/core/data_sources.py index d9bcbc58493..85c39bf5162 100644 --- a/ansys/dpf/core/data_sources.py +++ b/ansys/dpf/core/data_sources.py @@ -193,18 +193,48 @@ def add_file_path_for_specified_result(self, filepath, key="", result_key=""): request.data_sources.CopyFrom(self._message) self._stub.Update(request) - def add_upstream(self, upstream_data_sources): + def add_upstream(self, upstream_data_sources, result_key=""): """Add upstream data sources. + This is used to add a set of path creating an upstream for + recursive workflows. + Parameters ---------- upstream_data_sources : DataSources Set of paths creating an upstream for recursive workflows. 
+ result_key: str, optional + Extension of the result file group with which this upstream belongs + """ request = data_sources_pb2.UpdateUpstreamRequest() request.upstream_data_sources.CopyFrom(upstream_data_sources._message) request.data_sources.CopyFrom(self._message) + if hasattr(request, "result_key"): + request.result_key = result_key + self._stub.UpdateUpstream(request) + + def add_upstream_for_domain(self, upstream_data_sources, domain_id): + """Add an upstream data sources for a given domain. + + This is used to add a set of path creating an upstream for + recursive workflows in a distributed solve. + + Parameters + ---------- + upstream_data_sources : DataSources + Set of paths creating an upstream for recursive workflows. + + domain_id: int + Domain id for distributed files. + + """ + request = data_sources_pb2.UpdateUpstreamRequest() + request.upstream_data_sources.CopyFrom(upstream_data_sources._message) + request.data_sources.CopyFrom(self._message) + request.domain.domain_path = True + request.domain.domain_id = domain_id self._stub.UpdateUpstream(request) @property diff --git a/ansys/dpf/core/dpf_operator.py b/ansys/dpf/core/dpf_operator.py index 70860e19be9..bea0113053b 100644 --- a/ansys/dpf/core/dpf_operator.py +++ b/ansys/dpf/core/dpf_operator.py @@ -8,8 +8,11 @@ import functools import logging +import re +from typing import NamedTuple from ansys.dpf.core import server as serverlib +from ansys.dpf.core.check_version import version_requires, server_meet_version from ansys.dpf.core.config import Config from ansys.dpf.core.errors import protect_grpc from ansys.dpf.core.inputs import Inputs @@ -34,8 +37,11 @@ class Operator: name : str Name of the operator. For example, ``"U"``. You can use the ``"html_doc"`` operator to retrieve a list of existing operators. - config : ansys.dpf.core.Config, optional - The default is ``None``. + + config : Config, optional + The Configuration allows to customize how the operation + will be processed by the operator. 
The default is ``None``. + server : server.DPFServer, optional Server with the channel connected to the remote or local instance. The default is ``None``, in which case an attempt is made to use the global @@ -74,13 +80,16 @@ def __init__(self, name, config=None, server=None): self.__send_init_request(config) + self.__fill_spec() + # add dynamic inputs - if len(self._message.spec.map_input_pin_spec) > 0 and self._inputs == None: - self._inputs = Inputs(self._message.spec.map_input_pin_spec, self) - if len(self._message.spec.map_output_pin_spec) != 0 and self._outputs == None: - self._outputs = Outputs(self._message.spec.map_output_pin_spec, self) + if len(self._spec.inputs) > 0 and self._inputs == None: + self._inputs = Inputs(self._spec.inputs, self) + if len(self._spec.outputs) != 0 and self._outputs == None: + self._outputs = Outputs(self._spec.outputs, self) - self._description = self._message.spec.description + self._description = self._spec.description + self._progress_bar = False def _add_sub_res_operators(self, sub_results): """Dynamically add operators for instantiating subresults. @@ -107,6 +116,17 @@ def _add_sub_res_operators(self, sub_results): method2 = functools.partial(bound_method, name=result_type["operator name"]) setattr(self, result_type["name"], method2) + @property + @version_requires("3.0") + def progress_bar(self) -> bool: + """With this property, the user can choose to print a progress bar when + the operator's output is requested, default is False""" + return self._progress_bar + + @progress_bar.setter + def progress_bar(self, value: bool) -> None: + self._progress_bar = value + @protect_grpc def connect(self, pin, inpt, pin_out=0): """Connect an input on the operator using a pin number. 
@@ -144,7 +164,7 @@ def connect(self, pin, inpt, pin_out=0): request = operator_pb2.UpdateRequest() request.op.CopyFrom(self._message) request.pin = pin - _fillConnectionRequestMessage(request, inpt, pin_out) + tmp = _fillConnectionRequestMessage(request, inpt, self._server, pin_out) if inpt is self: raise ValueError("Cannot connect to itself.") self._stub.Update(request) @@ -153,6 +173,9 @@ def connect(self, pin, inpt, pin_out=0): def get_output(self, pin=0, output_type=None): """Retrieve the output of the operator on the pin number. + To activate the progress bar for server version higher or equal to 3.0, + use ``my_op.progress_bar=True`` + Parameters ---------- pin : int, optional @@ -170,13 +193,22 @@ def get_output(self, pin=0, output_type=None): request.op.CopyFrom(self._message) request.pin = pin - if output_type is not None: + if output_type: _write_output_type_to_proto_style(output_type, request) - out = self._stub.Get(request) + if server_meet_version("3.0", self._server) and self._progress_bar: + self._server._session.add_operator(self, pin, "workflow") + out_future = self._stub.Get.future(request) + while out_future.is_active(): + if self._progress_bar: + self._server._session.listen_to_progress() + out = out_future.result() + else: + out = self._stub.Get(request) return _convertOutputMessageToPythonInstance(out, output_type, self._server) else: request.type = base_pb2.Type.Value("RUN") - return self._stub.Get(request) + out_future = self._stub.Get.future(request) + out_future.result() @property def config(self): @@ -263,6 +295,8 @@ def default_config(name, server=None): You can change the copy of the default configuration to meet your needs before instantiating the operator. + The Configuration allows to customize how the operation + will be processed by the operator. 
Parameters ---------- @@ -347,7 +381,7 @@ def eval(self, pin=None): return output() def _find_outputs_corresponding_pins( - self, type_names, inpt, pin, corresponding_pins + self, type_names, inpt, pin, corresponding_pins ): from ansys.dpf.core.results import Result for python_name in type_names: @@ -434,7 +468,7 @@ def __sub__(self, fields_b): def __pow__(self, value): if value != 2: - raise ValueError('Only the value "2" is suppported.') + raise ValueError('Only the value "2" is supported.') from ansys.dpf.core import dpf_operator, operators if hasattr(operators, "math") and hasattr(operators.math, "sqr_fc"): @@ -455,7 +489,7 @@ def __mul__(self, value): from ansys.dpf.core import dpf_operator, operators if hasattr(operators, "math") and hasattr( - operators.math, "generalized_inner_product_fc" + operators.math, "generalized_inner_product_fc" ): op = operators.math.generalized_inner_product_fc(server=self._server) else: @@ -466,6 +500,24 @@ def __mul__(self, value): op.connect(1, value) return op + def __fill_spec(self): + """Put the grpc spec message in self._spec""" + if hasattr(self._message, "spec"): + self._spec = OperatorSpecification._fill_from_message(self.name, self._message.spec) + else: + out = self._stub.List(self._message) + self._spec = OperatorSpecification._fill_from_message(self.name, out.spec) + + @staticmethod + def operator_specification(op_name, server=None): + """Put the grpc spec message in self._spec""" + if server is None: + server = serverlib._global_server() + request = operator_pb2.Operator() + request.name = op_name + out = operator_pb2_grpc.OperatorServiceStub(server.channel).List(request) + return OperatorSpecification._fill_from_message(op_name, out.spec) + def __truediv__(self, inpt): if isinstance(inpt, Operator): op = Operator("div") @@ -478,6 +530,88 @@ def __truediv__(self, inpt): return op +class PinSpecification(NamedTuple): + name: str + type_names: list + optional: bool + document: str + ellipsis: bool + + @staticmethod + 
def _get_copy(other, changed_types): + return PinSpecification(other.name, + changed_types, + other.optional, + other.document, + other.ellipsis) + + +class OperatorSpecification(NamedTuple): + operator_name: str + description: str + properties: dict + inputs: dict + outputs: dict + + @staticmethod + def _fill_from_message(op_name, message: operator_pb2.Specification): + tmpinputs = {} + for key, inp in message.map_input_pin_spec.items(): + tmpinputs[key] = PinSpecification(inp.name, + inp.type_names, + inp.optional, + inp.document, + inp.ellipsis) + + tmpoutputs = {} + for key, inp in message.map_output_pin_spec.items(): + tmpoutputs[key] = PinSpecification(inp.name, + inp.type_names, + inp.optional, + inp.document, + inp.ellipsis) + + if hasattr(message, "properties"): + properties = dict(message.properties) + else: + properties = dict() + return OperatorSpecification(op_name, + message.description, + properties, + tmpinputs, + tmpoutputs) + + def __str__(self): + out = "" + for key, i in self._asdict().items(): + out += key + ": " + str(i) + "\n\n" + return out + + +def available_operator_names(server=None): + """Returns the list of operator names available in the server. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ + Returns + ------- + list + + """ + if server is None: + server = serverlib._global_server() + service = operator_pb2_grpc.OperatorServiceStub(server.channel).ListAllOperators( + operator_pb2.ListAllOperatorsRequest()) + arr = [] + for chunk in service: + arr.extend(re.split(r'[\x00-\x08]', chunk.array.decode('utf-8'))) + return arr + + def _write_output_type_to_proto_style(output_type, request): subtype = "" stype = "" @@ -491,6 +625,12 @@ def _write_output_type_to_proto_style(output_type, request): elif output_type == types.meshes_container: stype = "collection" subtype = "meshed_region" + elif hasattr(types, "vec_int") and output_type == types.vec_int: + stype = 'collection' + subtype = 'int' + elif hasattr(types, "vec_double") and output_type == types.vec_double: + stype = 'collection' + subtype = 'double' else: stype = output_type.name elif isinstance(output_type, list): @@ -509,6 +649,7 @@ def _convertOutputMessageToPythonInstance(out, output_type, server): data_sources, field, fields_container, + collection, meshed_region, meshes_container, property_field, @@ -516,6 +657,7 @@ def _convertOutputMessageToPythonInstance(out, output_type, server): scoping, scopings_container, time_freq_support, + workflow, ) if out.HasField("str"): @@ -546,6 +688,10 @@ def _convertOutputMessageToPythonInstance(out, output_type, server): return meshes_container.MeshesContainer( server=server, meshes_container=toconvert ) + elif output_type == types.vec_int or output_type == types.vec_double: + return collection.Collection(server=server, + collection=toconvert + )._get_integral_entries() elif out.HasField("scoping"): toconvert = out.scoping return scoping.Scoping(scoping=toconvert, server=server) @@ -566,9 +712,12 @@ def _convertOutputMessageToPythonInstance(out, output_type, server): elif out.HasField("cyc_support"): toconvert = out.cyc_support return cyclic_support.CyclicSupport(server=server, cyclic_support=toconvert) + elif out.HasField("workflow"): + toconvert = 
out.workflow + return workflow.Workflow(server=server, workflow=toconvert) -def _fillConnectionRequestMessage(request, inpt, pin_out=0): +def _fillConnectionRequestMessage(request, inpt, server, pin_out=0): from ansys.dpf.core import ( collection, cyclic_support, @@ -577,6 +726,7 @@ def _fillConnectionRequestMessage(request, inpt, pin_out=0): meshed_region, model, scoping, + workflow, ) if isinstance(inpt, str): @@ -589,9 +739,22 @@ def _fillConnectionRequestMessage(request, inpt, pin_out=0): request.double = inpt elif isinstance(inpt, list): if all(isinstance(x, int) for x in inpt): - request.vint.rep_int.extend(inpt) + if server_meet_version("3.0", server): + inpt = collection.Collection.integral_collection(inpt, server) + request.collection.CopyFrom(inpt._message) + return inpt + else: + request.vint.rep_int.extend(inpt) elif all(isinstance(x, float) for x in inpt): - request.vdouble.rep_double.extend(inpt) + if server_meet_version("3.0", server): + inpt = collection.Collection.integral_collection(inpt, server) + request.collection.CopyFrom(inpt._message) + return inpt + else: + request.vdouble.rep_double.extend(inpt) + else: + errormsg = f"input type {inpt.__class__} cannot be connected" + raise TypeError(errormsg) elif isinstance(inpt, field_base._FieldBase): request.field.CopyFrom(inpt._message) elif isinstance(inpt, collection.Collection): @@ -606,6 +769,8 @@ def _fillConnectionRequestMessage(request, inpt, pin_out=0): request.mesh.CopyFrom(inpt._message) elif isinstance(inpt, cyclic_support.CyclicSupport): request.cyc_support.CopyFrom(inpt._message) + elif isinstance(inpt, workflow.Workflow): + request.workflow.CopyFrom(inpt._message) elif isinstance(inpt, Operator): request.inputop.inputop.CopyFrom(inpt._message) request.inputop.pinOut = pin_out diff --git a/ansys/dpf/core/elements.py b/ansys/dpf/core/elements.py index 074152577cd..59c81b1a43d 100644 --- a/ansys/dpf/core/elements.py +++ b/ansys/dpf/core/elements.py @@ -6,11 +6,12 @@ from enum import Enum 
import numpy as np -from ansys.dpf.core import field, nodes, property_field, scoping -from ansys.dpf.core.common import __write_enum_doc__, locations +from ansys.grpc.dpf import meshed_region_pb2 + +from ansys.dpf.core import nodes, scoping +from ansys.dpf.core.common import __write_enum_doc__, locations, elemental_properties from ansys.dpf.core.element_descriptor import ElementDescriptor from ansys.dpf.core.errors import protect_grpc -from ansys.grpc.dpf import meshed_region_pb2 class Element: @@ -163,15 +164,10 @@ def type(self) -> int: """ return self._get_type() - @protect_grpc def _get_type(self): """Retrieve the Ansys element type.""" - - request = meshed_region_pb2.ElementalPropertyRequest() - request.mesh.CopyFrom(self._mesh._message) - request.index = self.index - request.property = meshed_region_pb2.ELEMENT_TYPE - return element_types(self._mesh._stub.GetElementalProperty(request).prop) + prop = self._get_single_property(elemental_properties.element_type) + return element_types(prop) @property def shape(self) -> str: @@ -196,15 +192,27 @@ def shape(self) -> str: """ return self._get_shape() - @protect_grpc def _get_shape(self): """Retrieve the element shape.""" + prop = self._get_single_property(elemental_properties.element_shape) + return meshed_region_pb2.ElementShape.Name(prop).lower() + + @protect_grpc + def _get_single_property(self, property_name): + """Return the element shape""" request = meshed_region_pb2.ElementalPropertyRequest() request.mesh.CopyFrom(self._mesh._message) request.index = self.index - request.property = meshed_region_pb2.ELEMENT_SHAPE - prop = self._mesh._stub.GetElementalProperty(request).prop - return meshed_region_pb2.ElementShape.Name(prop).lower() + if hasattr(request, "property_name"): + request.property_name.property_name = property_name + elif property_name in elemental_properties._elemental_property_type_dict: + request.property = meshed_region_pb2.ElementalPropertyType.Value( + 
elemental_properties._elemental_property_type_dict[property_name] + ) + else: + raise ValueError(property_name + " property is not supported") + + return self._mesh._stub.GetElementalProperty(request).prop @property def connectivity(self): @@ -479,16 +487,11 @@ def element_types_field(self): >>> model = dpf.Model(examples.static_rst) >>> elements = model.metadata.meshed_region.elements >>> field = elements.element_types_field - >>> field.data - array([1, 1, 1, 1, 1, 1, 1, 1]) + >>> print(field.data) + [1 1 1 1 1 1 1 1] """ - request = meshed_region_pb2.ListPropertyRequest() - request.mesh.CopyFrom(self._mesh._message) - # request.elemental_property = meshed_region_pb2.ElementalPropertyType.ELEMENT_TYPE - request.elemental_property = meshed_region_pb2.ELEMENT_TYPE - fieldOut = self._mesh._stub.ListProperty(request) - return field.Field(server=self._mesh._server, field=fieldOut) + return self._mesh.field_of_properties(elemental_properties.element_type) @property @protect_grpc @@ -508,16 +511,11 @@ def materials_field(self): >>> from ansys.dpf.core import examples >>> model = dpf.Model(examples.static_rst) >>> elements = model.metadata.meshed_region.elements - >>> elements.materials_field.data - array([1, 1, 1, 1, 1, 1, 1, 1]) + >>> print(elements.materials_field.data) + [1 1 1 1 1 1 1 1] """ - request = meshed_region_pb2.ListPropertyRequest() - request.mesh.CopyFrom(self._mesh._message) - # request.elemental_property = meshed_region_pb2.ElementalPropertyType.MATERIAL - request.elemental_property = meshed_region_pb2.MATERIAL - fieldOut = self._mesh._stub.ListProperty(request) - return field.Field(server=self._mesh._server, field=fieldOut) + return self._mesh.field_of_properties(elemental_properties.material) @property def connectivities_field(self): @@ -545,14 +543,7 @@ def connectivities_field(self): @protect_grpc def _get_connectivities_field(self): """Retrieve the connectivities field.""" - request = meshed_region_pb2.ListPropertyRequest() - 
request.mesh.CopyFrom(self._mesh._message) - # request.elemental_property = meshed_region_pb2.ElementalPropertyType.CONNECTIVITY - request.elemental_property = meshed_region_pb2.CONNECTIVITY - fieldOut = self._mesh._stub.ListProperty(request) - return property_field.PropertyField( - server=self._mesh._server, property_field=fieldOut - ) + return self._mesh.field_of_properties(elemental_properties.connectivity) @property def n_elements(self) -> int: diff --git a/ansys/dpf/core/errors.py b/ansys/dpf/core/errors.py index 2c97615068f..458ad412888 100644 --- a/ansys/dpf/core/errors.py +++ b/ansys/dpf/core/errors.py @@ -71,7 +71,7 @@ def __init__(self, msg=_FIELD_CONTAINER_PLOTTING_MSG): class InvalidANSYSVersionError(RuntimeError): - """Error raised when the Ansys verion is invalid.""" + """Error raised when the Ansys version is invalid.""" def __init__(self, msg=""): RuntimeError.__init__(self, msg) diff --git a/ansys/dpf/core/examples/downloads.py b/ansys/dpf/core/examples/downloads.py index df82dc206ad..b72df6b8a7f 100644 --- a/ansys/dpf/core/examples/downloads.py +++ b/ansys/dpf/core/examples/downloads.py @@ -3,15 +3,15 @@ import os import urllib.request -from ansys.dpf.core import EXAMPLES_PATH EXAMPLE_REPO = "https://github.com/pyansys/example-data/raw/master/result_files/" def delete_downloads(): """Delete all downloaded examples to free space or update the files""" - shutil.rmtree(EXAMPLES_PATH) - os.makedirs(EXAMPLES_PATH) + from ansys.dpf.core import LOCAL_DOWNLOADED_EXAMPLES_PATH + shutil.rmtree(LOCAL_DOWNLOADED_EXAMPLES_PATH) + os.makedirs(LOCAL_DOWNLOADED_EXAMPLES_PATH) def _get_file_url(directory, filename): @@ -20,11 +20,14 @@ def _get_file_url(directory, filename): def _retrieve_file(url, filename, directory): """Download a file from a url""" + from ansys.dpf.core import LOCAL_DOWNLOADED_EXAMPLES_PATH, path_utilities # First check if file has already been downloaded - local_path = os.path.join(EXAMPLES_PATH, directory, os.path.basename(filename)) + 
local_path = os.path.join(LOCAL_DOWNLOADED_EXAMPLES_PATH, directory, os.path.basename(filename)) local_path_no_zip = local_path.replace(".zip", "") if os.path.isfile(local_path_no_zip) or os.path.isdir(local_path_no_zip): - return local_path_no_zip + return path_utilities.to_server_os(local_path_no_zip.replace( + LOCAL_DOWNLOADED_EXAMPLES_PATH, + path_utilities.downloaded_example_path())) # grab the correct url retriever urlretrieve = urllib.request.urlretrieve @@ -35,21 +38,14 @@ def _retrieve_file(url, filename, directory): # Perform download _, resp = urlretrieve(url, local_path) - return local_path + return path_utilities.to_server_os(local_path.replace( + LOCAL_DOWNLOADED_EXAMPLES_PATH, + path_utilities.downloaded_example_path())) def _download_file(directory, filename): url = _get_file_url(directory, filename) local_path = _retrieve_file(url, filename, directory) - - if os.environ.get("DPF_DOCKER", False): # pragma: no cover - # override path if running on docker as path must be relative - # to docker mount - # - # Assumes the following mapping in docker - # DWN_CSH=/tmp/dpf_cache - # -v $DWN_CSH:/dpf/_cache - local_path = os.path.join("/dpf/_cache", directory, filename) return local_path @@ -231,7 +227,7 @@ def download_sub_file() -> str: def download_msup_files_to_dict() -> dict: """Download all the files necessary for a msup expansion and return the - download paths into a dictionnary extension->path. + download paths into a dictionary extension->path. Examples files are downloaded to a persistent cache to avoid re-downloading the same file twice. @@ -262,7 +258,7 @@ def download_msup_files_to_dict() -> dict: def download_distributed_files() -> dict: """Download distributed rst files and return the - download paths into a dictionnary domain id->path. + download paths into a dictionary domain id->path. Examples files are downloaded to a persistent cache to avoid re-downloading the same file twice. 
@@ -291,7 +287,7 @@ def download_distributed_files() -> dict: def download_fluent_files() -> dict: """Download the cas and dat file of a fluent analysis and return the - download paths into a dictionnary extension->path. + download paths into a dictionary extension->path. Examples files are downloaded to a persistent cache to avoid re-downloading the same file twice. @@ -308,8 +304,8 @@ def download_fluent_files() -> dict: >>> from ansys.dpf.core import examples >>> paths = examples.download_fluent_files() >>> paths - {'cas': 'C:\\Users\\cbellot\\AppData\\Local\\ansys-dpf-core\\ansys-dpf-core\\examples\\fluent\\FFF.cas.h5', - 'dat': 'C:\\Users\\cbellot\\AppData\\Local\\ansys-dpf-core\\ansys-dpf-core\\examples\\fluent\\FFF.dat.h5'} # noqa: E501 + {'cas': 'C:\\Users\\user\\AppData\\Local\\ansys-dpf-core\\ansys-dpf-core\\examples\\fluent\\FFF.cas.h5', + 'dat': 'C:\\Users\\user\\AppData\\Local\\ansys-dpf-core\\ansys-dpf-core\\examples\\fluent\\FFF.dat.h5'} # noqa: E501 """ return { @@ -365,7 +361,7 @@ def download_extrapolation_2d_result() -> dict: Contains path to the example file of ref and path to the example file of integrated points. 
- Examples + Examples -------- Download 2 example result files and return the dictionary containing 2 files diff --git a/ansys/dpf/core/examples/examples.py b/ansys/dpf/core/examples/examples.py index 876ef5d4588..cc4f3caeada 100644 --- a/ansys/dpf/core/examples/examples.py +++ b/ansys/dpf/core/examples/examples.py @@ -18,3 +18,4 @@ transient_therm = os.path.join(_module_path, "rth", "rth_transient.rth") msup_transient = os.path.join(_module_path, "msup_transient_plate1.rst") simple_cyclic = os.path.join(_module_path, "file_cyclic.rst") +distributed_msup_folder = os.path.join(_module_path, 'msup_distributed') diff --git a/ansys/dpf/core/examples/msup_distributed/file0.mode b/ansys/dpf/core/examples/msup_distributed/file0.mode new file mode 100644 index 00000000000..caf8c7336b4 Binary files /dev/null and b/ansys/dpf/core/examples/msup_distributed/file0.mode differ diff --git a/ansys/dpf/core/examples/msup_distributed/file0.rst b/ansys/dpf/core/examples/msup_distributed/file0.rst new file mode 100644 index 00000000000..6e1e4d65f9a Binary files /dev/null and b/ansys/dpf/core/examples/msup_distributed/file0.rst differ diff --git a/ansys/dpf/core/examples/msup_distributed/file1.mode b/ansys/dpf/core/examples/msup_distributed/file1.mode new file mode 100644 index 00000000000..5c41345d69e Binary files /dev/null and b/ansys/dpf/core/examples/msup_distributed/file1.mode differ diff --git a/ansys/dpf/core/examples/msup_distributed/file1.rst b/ansys/dpf/core/examples/msup_distributed/file1.rst new file mode 100644 index 00000000000..3532d1749c0 Binary files /dev/null and b/ansys/dpf/core/examples/msup_distributed/file1.rst differ diff --git a/ansys/dpf/core/examples/msup_distributed/file_load_1.rfrq b/ansys/dpf/core/examples/msup_distributed/file_load_1.rfrq new file mode 100644 index 00000000000..f539c410fd5 Binary files /dev/null and b/ansys/dpf/core/examples/msup_distributed/file_load_1.rfrq differ diff --git a/ansys/dpf/core/examples/msup_distributed/file_load_2.rfrq 
b/ansys/dpf/core/examples/msup_distributed/file_load_2.rfrq new file mode 100644 index 00000000000..8db0759e5e3 Binary files /dev/null and b/ansys/dpf/core/examples/msup_distributed/file_load_2.rfrq differ diff --git a/ansys/dpf/core/field.py b/ansys/dpf/core/field.py index 7cc222c8a48..ba41b606b75 100644 --- a/ansys/dpf/core/field.py +++ b/ansys/dpf/core/field.py @@ -443,7 +443,10 @@ def _set_support(self, support, support_type: str): request = field_pb2.SetSupportRequest() request.field.CopyFrom(self._message) request.support.type = base_pb2.Type.Value(support_type) - request.support.id = support._message.id + if isinstance(request.support.id, int): + request.support.id = support._message.id + else: + request.support.id.id = support._message.id.id self._stub.SetSupport(request) @property @@ -496,7 +499,7 @@ def __add__(self, field_b): def __pow__(self, value): if value != 2: - raise ValueError('Only the value "2" is suppported.') + raise ValueError('Only the value "2" is supported.') from ansys.dpf.core import dpf_operator, operators if hasattr(operators, "math") and hasattr(operators.math, "sqr"): diff --git a/ansys/dpf/core/field_base.py b/ansys/dpf/core/field_base.py index feb7cc6cbf1..4ca38fb8e57 100644 --- a/ansys/dpf/core/field_base.py +++ b/ansys/dpf/core/field_base.py @@ -12,13 +12,13 @@ class _FieldBase: """Contains base APIs for all implementations that follow DPF's field concept.""" def __init__( - self, - nentities=0, - nature=natures.vector, - location=locations.nodal, - is_property_field=False, - field=None, - server=None, + self, + nentities=0, + nature=natures.vector, + location=locations.nodal, + is_property_field=False, + field=None, + server=None, ): """Initialize the field either with an optional field message or by connecting to a stub.""" if server is None: @@ -63,10 +63,13 @@ def __init__( def shape(self): """Numpy-like shape of the field. + Returns + ------- + tuple + Examples -------- - tuple - Shape of a stress field. 
+ Shape of a stress field. >>> from ansys.dpf import core as dpf >>> from ansys.dpf.core import examples @@ -418,14 +421,14 @@ def _set_data_pointer(self, data): def data(self): """Data in the field as an array. - Notes - ----- - Print a progress bar. - Returns ------- numpy.ndarray Data in the field. + + Notes + ----- + Print a progress bar. """ return self._get_data() @@ -487,10 +490,10 @@ def _set_data(self, data): else: if isinstance(data, (np.ndarray, np.generic)): if ( - 0 != self.size - and self.component_count > 1 - and data.size // self.component_count - != data.size / self.component_count + 0 != self.size + and self.component_count > 1 + and data.size // self.component_count + != data.size / self.component_count ): raise ValueError( f"An array of shape {self.shape} is expected and " @@ -606,10 +609,10 @@ def get_entity_data(self, index): last_index = self._ncomp * (index + 1) - 1 if self._is_property_field: array = np.array( - self._data_copy[first_index : last_index + 1], dtype=np.int32 + self._data_copy[first_index: last_index + 1], dtype=np.int32 ) else: - array = np.array(self._data_copy[first_index : last_index + 1]) + array = np.array(self._data_copy[first_index: last_index + 1]) if self._ncomp > 1: return array.reshape((array.size // self._ncomp, self._ncomp)) @@ -687,7 +690,7 @@ def append(self, data, scopingid): if not isinstance(data[0], int) and not isinstance(data[0], np.int32): raise errors.InvalidTypeError("data", "list of int") if (len(data) > 0 and isinstance(data, list)) or isinstance( - data, (np.ndarray, np.generic) + data, (np.ndarray, np.generic) ): data = np.array(data).flatten().tolist() diff --git a/ansys/dpf/core/fields_container.py b/ansys/dpf/core/fields_container.py index 94c5985187f..198afe82422 100644 --- a/ansys/dpf/core/fields_container.py +++ b/ansys/dpf/core/fields_container.py @@ -524,7 +524,7 @@ def __sub__(self, fields_b): def __pow__(self, value): if value != 2: - raise ValueError('DPF only the value is "2" 
suppported') + raise ValueError('DPF only the value is "2" supported') from ansys.dpf.core import dpf_operator from ansys.dpf.core import operators diff --git a/ansys/dpf/core/mapping_types.py b/ansys/dpf/core/mapping_types.py index 7b0666fecca..281b13011d1 100644 --- a/ansys/dpf/core/mapping_types.py +++ b/ansys/dpf/core/mapping_types.py @@ -38,6 +38,7 @@ def __missing__(self, key): for k, v in map_types_to_cpp.items(): map_types_to_python[v] = k map_types_to_python["vector"] = "list" +map_types_to_python["b"] = "bool" map_unit_system = _smart_dict_unit_system() map_unit_system[5] = "Metric (cm, g, dyne, s, V, A)" diff --git a/ansys/dpf/core/meshed_region.py b/ansys/dpf/core/meshed_region.py index 060a2ebd8de..e47eaf77484 100644 --- a/ansys/dpf/core/meshed_region.py +++ b/ansys/dpf/core/meshed_region.py @@ -3,9 +3,9 @@ ============ """ from ansys import dpf -from ansys.dpf.core import scoping +from ansys.dpf.core import scoping, field, property_field from ansys.dpf.core.check_version import server_meet_version -from ansys.dpf.core.common import locations, types +from ansys.dpf.core.common import locations, types, nodal_properties, elemental_properties from ansys.dpf.core.elements import Elements, element_types from ansys.dpf.core.nodes import Nodes from ansys.dpf.core.plotter import Plotter as _DpfPlotter @@ -81,7 +81,10 @@ def __init__(self, num_nodes=None, num_elements=None, mesh=None, server=None): self.__send_init_request(num_nodes, num_elements) else: # support_pb2.Support self._message = meshed_region_pb2.MeshedRegion() - self._message.id = mesh.id + if isinstance(self._message.id, int): + self._message.id = mesh.id + else: + self._message.id.CopyFrom(mesh.id) self._full_grid = None self._elements = None @@ -227,7 +230,12 @@ def _get_available_named_selections(self): ------- named_selections : list str """ - return self._stub.List(self._message).named_selections + if hasattr(self._stub, "ListNamedSelections"): + request = 
meshed_region_pb2.ListNamedSelectionsRequest() + request.mesh.CopyFrom(self._message) + return self._stub.ListNamedSelections(request).named_selections + else: + return self._stub.List(self._message).named_selections def named_selection(self, named_selection): """Scoping containing the list of nodes or elements in the named selection. @@ -329,7 +337,7 @@ def _as_vtk(self, as_linear=True, include_ids=False): @property def grid(self): - """Unstructured grid in VTK fromat from PyVista. + """Unstructured grid in VTK format from PyVista. Returns ------- @@ -470,6 +478,50 @@ def __send_init_request(self, num_nodes=0, num_elements=0): request.num_elements_reserved = num_elements self._message = self._stub.Create(request) + def field_of_properties(self, property_name): + """Returns the ``Field`` or ``PropertyField`` associated + to a given property of the mesh + + Parameters + ---------- + property_name : str, common.elemental_properties, common.nodal_properties + Name of the property. + + Returns + ------- + properties : Field, PropertyField + + Examples + -------- + >>> import ansys.dpf.core as dpf + >>> from ansys.dpf.core import examples + >>> model = dpf.Model(examples.static_rst) + >>> meshed_region = model.metadata.meshed_region + >>> connectivity = meshed_region.field_of_properties( + ... 
dpf.common.elemental_properties.connectivity) + >>> coordinates = meshed_region.field_of_properties(dpf.common.nodal_properties.coordinates) + """ + request = meshed_region_pb2.ListPropertyRequest() + request.mesh.CopyFrom(self._message) + if hasattr(request, "property_type"): + request.property_type.property_name.property_name = property_name + elif property_name in nodal_properties._nodal_property_type_dict: + request.nodal_property = meshed_region_pb2.NodalPropertyType.Value( + nodal_properties._nodal_property_type_dict[property_name] + ) + elif property_name in elemental_properties._elemental_property_type_dict: + request.elemental_property = meshed_region_pb2.ElementalPropertyType.Value( + elemental_properties._elemental_property_type_dict[property_name] + ) + else: + raise ValueError(property_name + " property is not supported") + + field_out = self._stub.ListProperty(request) + if field_out.datatype == "int": + return property_field.PropertyField(server=self._server, property_field=field_out) + else: + return field.Field(server=self._server, field=field_out) + _to_cache = { _get_unit: [_set_unit], _get_available_named_selections: None, diff --git a/ansys/dpf/core/meshes_container.py b/ansys/dpf/core/meshes_container.py index cd1ad9b17d5..433fb1fa537 100644 --- a/ansys/dpf/core/meshes_container.py +++ b/ansys/dpf/core/meshes_container.py @@ -54,14 +54,12 @@ def plot(self, fields_container=None, **kwargs): >>> from ansys.dpf.core import examples >>> model = dpf.Model(examples.multishells_rst) >>> mesh = model.metadata.meshed_region - >>> split_mesh_op = dpf.Operator("split_mesh") - >>> split_mesh_op.connect(7, mesh) - >>> split_mesh_op.connect(13, "mat") - >>> meshes_cont = split_mesh_op.outputs.mesh_controller() - >>> disp_op = dpf.Operator("U") - >>> disp_op.connect(7, meshes_cont) - >>> ds = dpf.DataSources(examples.multishells_rst) - >>> disp_op.connect(4, ds) + >>> split_mesh_op = dpf.operators.mesh.split_mesh(mesh=mesh, property="mat") + >>> meshes_cont 
= split_mesh_op.eval() + >>> disp_op = dpf.operators.result.displacement( + ... data_sources = dpf.DataSources(examples.multishells_rst), + ... mesh = meshes_cont + ... ) >>> disp_fc = disp_op.outputs.fields_container() >>> meshes_cont.plot(disp_fc) diff --git a/ansys/dpf/core/model.py b/ansys/dpf/core/model.py index 15b0b682b21..372a3e546ff 100644 --- a/ansys/dpf/core/model.py +++ b/ansys/dpf/core/model.py @@ -51,6 +51,7 @@ def __init__(self, data_sources=None, server=None): self._server = server self._metadata = None self._results = None + self._mesh_by_default = True @property def metadata(self): @@ -96,7 +97,7 @@ def metadata(self): >>> rinfo = model.metadata.result_info >>> rinfo.unit_system - 'Metric (m, kg, N, s, V, A)' + 'MKS: m, kg, N, s, V, A, degC' """ if not self._metadata: @@ -124,13 +125,10 @@ def results(self): Examples -------- - Create a stress result from the model and choose its time and mesh scopings. - >>> from ansys.dpf import core as dpf >>> from ansys.dpf.core import examples >>> model = dpf.Model(examples.electric_therm) >>> v = model.results.electric_potential - >>> rf = model.results.reaction_force >>> dissip = model.results.thermal_dissipation_energy Examples @@ -169,10 +167,13 @@ def __connect_op__(self, op): if self.metadata._stream_provider is not None and hasattr(op.inputs, "streams"): op.inputs.streams.connect(self.metadata._stream_provider.outputs) elif self.metadata._data_sources is not None and hasattr( - op.inputs, "data_sources" + op.inputs, "data_sources" ): op.inputs.data_sources.connect(self.metadata._data_sources) + if self.mesh_by_default and self.metadata.mesh_provider and hasattr(op.inputs, "mesh"): + op.inputs.mesh.connect(self.metadata.mesh_provider) + def operator(self, name): """Operator associated with the data sources of this model. 
@@ -228,13 +229,24 @@ def plot(self, color="w", show_edges=True, **kwargs): color=color, show_edges=show_edges, **kwargs ) + @property + def mesh_by_default(self): + """If true, the mesh is connected by default to operators + supporting the mesh input + """ + return self._mesh_by_default + + @mesh_by_default.setter + def mesh_by_default(self, value): + self._mesh_by_default = value + class Metadata: """Contains the metadata of a data source. Parameters ---------- - data_sources : + data_sources : DataSources server : server.DPFServer Server with the channel connected to the remote or local instance. @@ -260,7 +272,7 @@ def _cache_streams_provider(self): from ansys.dpf.core import operators if hasattr(operators, "metadata") and hasattr( - operators.metadata, "stream_provider" + operators.metadata, "stream_provider" ): self._stream_provider = operators.metadata.streams_provider( data_sources=self._data_sources, server=self._server @@ -268,6 +280,10 @@ def _cache_streams_provider(self): else: self._stream_provider = Operator("stream_provider", server=self._server) self._stream_provider.inputs.connect(self._data_sources) + try: + self._stream_provider.run() + except: + self._stream_provider = None @property @protect_source_op_not_found @@ -306,7 +322,10 @@ def time_freq_support(self): """ if self._time_freq_support is None: timeProvider = Operator("TimeFreqSupportProvider", server=self._server) - timeProvider.inputs.connect(self._stream_provider.outputs) + if self._stream_provider: + timeProvider.inputs.connect(self._stream_provider.outputs) + else: + timeProvider.inputs.connect(self.data_sources) self._time_freq_support = timeProvider.get_output( 0, types.time_freq_support ) @@ -385,6 +404,8 @@ def _load_result_info(self): raise RuntimeError("Unable to open result file") from None else: raise e + except: + return None return result_info @property @@ -426,7 +447,10 @@ def mesh_provider(self): except: pass mesh_provider = Operator("MeshProvider", server=self._server) - 
mesh_provider.inputs.connect(self._stream_provider.outputs) + if self._stream_provider: + mesh_provider.inputs.connect(self._stream_provider.outputs) + else: + mesh_provider.inputs.connect(self.data_sources) return mesh_provider @property diff --git a/ansys/dpf/core/nodes.py b/ansys/dpf/core/nodes.py index a6cd943e084..b813f643142 100644 --- a/ansys/dpf/core/nodes.py +++ b/ansys/dpf/core/nodes.py @@ -4,11 +4,10 @@ """ import numpy as np - +from ansys.grpc.dpf import meshed_region_pb2 from ansys import dpf -from ansys.dpf.core import field, property_field -from ansys.grpc.dpf import meshed_region_pb2 +from ansys.dpf.core.common import nodal_properties from ansys.dpf.core.errors import protect_grpc @@ -244,23 +243,12 @@ def nodal_connectivity_field(self): array([0, 2, 4, 6]) """ - request = meshed_region_pb2.ListPropertyRequest() - request.mesh.CopyFrom(self._mesh._message) - request.nodal_property = meshed_region_pb2.NODAL_CONNECTIVITY - fieldOut = self._mesh._stub.ListProperty(request) - return property_field.PropertyField( - server=self._mesh._server, property_field=fieldOut - ) + return self._mesh.field_of_properties(nodal_properties.nodal_connectivity) @protect_grpc def _get_coordinates_field(self): """Retrieve the coordinates field.""" - request = meshed_region_pb2.ListPropertyRequest() - request.mesh.CopyFrom(self._mesh._message) - # request.nodal_property = meshed_region_pb2.NodalPropertyType.COORDINATES - request.nodal_property = meshed_region_pb2.COORDINATES - fieldOut = self._mesh._stub.ListProperty(request) - return field.Field(server=self._mesh._server, field=fieldOut) + return self._mesh.field_of_properties(nodal_properties.coordinates) def _build_mapping_id_to_index(self): """Retrieve a mapping between IDs and indices of the entity.""" diff --git a/ansys/dpf/core/operators/__init__.py b/ansys/dpf/core/operators/__init__.py index 24aa5fc388f..a933dff7b3a 100644 --- a/ansys/dpf/core/operators/__init__.py +++ b/ansys/dpf/core/operators/__init__.py @@ 
-1,22 +1,14 @@ -""" -.. _ref_operators_package: - -ansys.dpf.core.operators -======================== -""" - - from . import result from . import math +from . import utility from . import min_max from . import scoping -from . import utility from . import metadata from . import logic from . import mesh from . import filter from . import serialization -from . import averaging from . import geo +from . import averaging from . import invariant from . import mapping diff --git a/ansys/dpf/core/operators/averaging/__init__.py b/ansys/dpf/core/operators/averaging/__init__.py index 4516933a8ba..537b956e104 100644 --- a/ansys/dpf/core/operators/averaging/__init__.py +++ b/ansys/dpf/core/operators/averaging/__init__.py @@ -1,13 +1,15 @@ -from .nodal_fraction_fc import nodal_fraction_fc -from .elemental_nodal_to_nodal_elemental_fc import elemental_nodal_to_nodal_elemental_fc from .elemental_difference import elemental_difference from .elemental_nodal_to_nodal import elemental_nodal_to_nodal -from .elemental_difference_fc import elemental_difference_fc from .elemental_nodal_to_nodal_fc import elemental_nodal_to_nodal_fc from .elemental_to_nodal import elemental_to_nodal from .elemental_to_nodal_fc import elemental_to_nodal_fc +from .elemental_to_elemental_nodal import elemental_to_elemental_nodal from .nodal_difference import nodal_difference +from .elemental_to_elemental_nodal_fc import elemental_to_elemental_nodal_fc from .nodal_difference_fc import nodal_difference_fc +from .elemental_difference_fc import elemental_difference_fc +from .nodal_fraction_fc import nodal_fraction_fc +from .elemental_nodal_to_nodal_elemental_fc import elemental_nodal_to_nodal_elemental_fc from .elemental_fraction_fc import elemental_fraction_fc from .to_nodal import to_nodal from .to_nodal_fc import to_nodal_fc diff --git a/ansys/dpf/core/operators/averaging/elemental_difference.py b/ansys/dpf/core/operators/averaging/elemental_difference.py index c4414193cef..e97c52848b4 100644 --- 
a/ansys/dpf/core/operators/averaging/elemental_difference.py +++ b/ansys/dpf/core/operators/averaging/elemental_difference.py @@ -1,78 +1,148 @@ """ elemental_difference -==================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "averaging" category -""" class elemental_difference(Operator): - """Transform ElementalNodal or Nodal field into Elemental field. Each elemental value is the maximum difference between the computed result for all nodes in this element. Result is computed on a given element scoping. - - available inputs: - - field (Field, FieldsContainer) - - mesh_scoping (Scoping) (optional) - - mesh (MeshedRegion) (optional) - - through_layers (bool) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.averaging.elemental_difference() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_through_layers = bool() - >>> op.inputs.through_layers.connect(my_through_layers) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.averaging.elemental_difference(field=my_field,mesh_scoping=my_mesh_scoping,mesh=my_mesh,through_layers=my_through_layers) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, 
field=None, mesh_scoping=None, mesh=None, through_layers=None, config=None, server=None): - super().__init__(name="elemental_difference", config = config, server = server) + """Transform ElementalNodal or Nodal field into Elemental field. Each + elemental value is the maximum difference between the computed + result for all nodes in this element. Result is computed on a + given element scoping. + + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + mesh_scoping : Scoping, optional + Average only on these entities + mesh : MeshedRegion, optional + through_layers : bool, optional + The max elemental difference is taken through + the different shell layers if true + (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.averaging.elemental_difference() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_through_layers = bool() + >>> op.inputs.through_layers.connect(my_through_layers) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.averaging.elemental_difference( + ... field=my_field, + ... mesh_scoping=my_mesh_scoping, + ... mesh=my_mesh, + ... through_layers=my_through_layers, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__( + self, + field=None, + mesh_scoping=None, + mesh=None, + through_layers=None, + config=None, + server=None, + ): + super().__init__(name="elemental_difference", config=config, server=server) self._inputs = InputsElementalDifference(self) self._outputs = OutputsElementalDifference(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) - if through_layers !=None: + if through_layers is not None: self.inputs.through_layers.connect(through_layers) @staticmethod def _spec(): - spec = Specification(description="""Transform ElementalNodal or Nodal field into Elemental field. Each elemental value is the maximum difference between the computed result for all nodes in this element. Result is computed on a given element scoping.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scoping"], optional=True, document="""average only on these entities"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document=""""""), - 10 : PinSpecification(name = "through_layers", type_names=["bool"], optional=True, document="""the max elemental difference is taken through the different shell layers if true (default is false)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Transform ElementalNodal or Nodal field into Elemental field. 
Each + elemental value is the maximum difference between the + computed result for all nodes in this element. Result is + computed on a given element scoping.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=True, + document="""Average only on these entities""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + 10: PinSpecification( + name="through_layers", + type_names=["bool"], + optional=True, + document="""The max elemental difference is taken through + the different shell layers if true + (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "elemental_difference") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="elemental_difference", server=server) @property def inputs(self): @@ -80,169 +150,164 @@ def inputs(self): Returns -------- - inputs : InputsElementalDifference + inputs : InputsElementalDifference """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElementalDifference + outputs : OutputsElementalDifference """ return super().outputs -#internal name: elemental_difference -#scripting name: elemental_difference class InputsElementalDifference(_Inputs): - """Intermediate class used to connect user inputs to elemental_difference operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.elemental_difference() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_through_layers = bool() - >>> op.inputs.through_layers.connect(my_through_layers) + """Intermediate class used to connect user inputs to + elemental_difference operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_difference() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_through_layers = bool() + >>> op.inputs.through_layers.connect(my_through_layers) """ + def __init__(self, op: Operator): super().__init__(elemental_difference._spec().inputs, op) - self._field = Input(elemental_difference._spec().input_pin(0), 0, op, -1) + self._field = Input(elemental_difference._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._mesh_scoping = Input(elemental_difference._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(elemental_difference._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._mesh = Input(elemental_difference._spec().input_pin(7), 7, op, -1) + self._mesh = Input(elemental_difference._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._through_layers = Input(elemental_difference._spec().input_pin(10), 10, op, -1) + self._through_layers = Input( + elemental_difference._spec().input_pin(10), 10, op, -1 + ) self._inputs.append(self._through_layers) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_difference() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: average only on these entities + Average only on these entities Parameters ---------- - my_mesh_scoping : Scoping, + my_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_difference() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_difference() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def through_layers(self): - """Allows to connect through_layers input to the operator + """Allows to connect through_layers input to the operator. 
- - pindoc: the max elemental difference is taken through the different shell layers if true (default is false) + The max elemental difference is taken through + the different shell layers if true + (default is false) Parameters ---------- - my_through_layers : bool, + my_through_layers : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_difference() >>> op.inputs.through_layers.connect(my_through_layers) - >>> #or + >>> # or >>> op.inputs.through_layers(my_through_layers) - """ return self._through_layers + class OutputsElementalDifference(_Outputs): - """Intermediate class used to get outputs from elemental_difference operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.elemental_difference() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + elemental_difference operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_difference() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(elemental_difference._spec().outputs, op) - self._fields_container = Output(elemental_difference._spec().output_pin(0), 0, op) - self._outputs.append(self._fields_container) + self._field = Output(elemental_difference._spec().output_pin(0), 0, op) + self._outputs.append(self._field) @property - def fields_container(self): - """Allows to get fields_container output of the operator - + def field(self): + """Allows to get field output of the operator Returns ---------- - my_fields_container : FieldsContainer, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_difference() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ - return self._fields_container - + >>> result_field = op.outputs.field() + """ # noqa: E501 + return self._field diff --git a/ansys/dpf/core/operators/averaging/elemental_difference_fc.py b/ansys/dpf/core/operators/averaging/elemental_difference_fc.py index 47cea93a301..fdb63d22422 100644 --- a/ansys/dpf/core/operators/averaging/elemental_difference_fc.py +++ b/ansys/dpf/core/operators/averaging/elemental_difference_fc.py @@ -1,78 +1,163 @@ """ elemental_difference_fc -======================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "averaging" category -""" class elemental_difference_fc(Operator): - """Transform ElementalNodal or Nodal field into Elemental field. Each elemental value is the maximum difference between the unaveraged or averaged (depending on the input fields) computed result for all nodes in this element. Result is computed on a given element scoping. If the input fields are mixed shell/solid and the shells layers are not asked to be collapsed, then the fields are splitted by element shape and the output fields container has elshape label. 
- - available inputs: - - fields_container (FieldsContainer) - - mesh (MeshedRegion, MeshesContainer) (optional) - - scoping (Scoping, ScopingsContainer) (optional) - - collapse_shell_layers (bool) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.averaging.elemental_difference_fc() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - >>> my_collapse_shell_layers = bool() - >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.averaging.elemental_difference_fc(fields_container=my_fields_container,mesh=my_mesh,scoping=my_scoping,collapse_shell_layers=my_collapse_shell_layers) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, mesh=None, scoping=None, collapse_shell_layers=None, config=None, server=None): - super().__init__(name="elemental_difference_fc", config = config, server = server) + """Transform ElementalNodal or Nodal field into Elemental field. Each + elemental value is the maximum difference between the unaveraged + or averaged (depending on the input fields) computed result for + all nodes in this element. Result is computed on a given element + scoping. If the input fields are mixed shell/solid and the shells + layers are not asked to be collapsed, then the fields are split by + element shape and the output fields container has elshape label. 
+ + Parameters + ---------- + fields_container : FieldsContainer + mesh : MeshedRegion or MeshesContainer, optional + The mesh region in this pin is used to + perform the averaging, if there is no + field's support it is used + scoping : Scoping or ScopingsContainer, optional + Average only on these elements, if it is + scoping container, the label must + correspond to the one of the fields + container + collapse_shell_layers : bool, optional + The max elemental difference is taken through + the different shell layers if true + (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.averaging.elemental_difference_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_collapse_shell_layers = bool() + >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.averaging.elemental_difference_fc( + ... fields_container=my_fields_container, + ... mesh=my_mesh, + ... scoping=my_scoping, + ... collapse_shell_layers=my_collapse_shell_layers, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + fields_container=None, + mesh=None, + scoping=None, + collapse_shell_layers=None, + config=None, + server=None, + ): + super().__init__(name="elemental_difference_fc", config=config, server=server) self._inputs = InputsElementalDifferenceFc(self) self._outputs = OutputsElementalDifferenceFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) - if scoping !=None: + if scoping is not None: self.inputs.scoping.connect(scoping) - if collapse_shell_layers !=None: + if collapse_shell_layers is not None: self.inputs.collapse_shell_layers.connect(collapse_shell_layers) @staticmethod def _spec(): - spec = Specification(description="""Transform ElementalNodal or Nodal field into Elemental field. Each elemental value is the maximum difference between the unaveraged or averaged (depending on the input fields) computed result for all nodes in this element. Result is computed on a given element scoping. 
If the input fields are mixed shell/solid and the shells layers are not asked to be collapsed, then the fields are splitted by element shape and the output fields container has elshape label.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""the mesh region in this pin is used to perform the averaging, if there is no field's support it is used"""), - 3 : PinSpecification(name = "scoping", type_names=["scoping","scopings_container"], optional=True, document="""average only on these elements, if it is scoping container, the label must correspond to the one of the fields container"""), - 10 : PinSpecification(name = "collapse_shell_layers", type_names=["bool"], optional=True, document="""the max elemental difference is taken through the different shell layers if true (default is false)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Transform ElementalNodal or Nodal field into Elemental field. Each + elemental value is the maximum difference between the + unaveraged or averaged (depending on the input fields) + computed result for all nodes in this element. Result is + computed on a given element scoping. 
If the input fields + are mixed shell/solid and the shells layers are not asked + to be collapsed, then the fields are split by element + shape and the output fields container has elshape label.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""The mesh region in this pin is used to + perform the averaging, if there is no + field's support it is used""", + ), + 3: PinSpecification( + name="scoping", + type_names=["scoping", "scopings_container"], + optional=True, + document="""Average only on these elements, if it is + scoping container, the label must + correspond to the one of the fields + container""", + ), + 10: PinSpecification( + name="collapse_shell_layers", + type_names=["bool"], + optional=True, + document="""The max elemental difference is taken through + the different shell layers if true + (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "elemental_difference_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="elemental_difference_fc", server=server) @property def inputs(self): @@ -80,169 +165,172 @@ def inputs(self): Returns -------- - inputs : InputsElementalDifferenceFc + inputs : InputsElementalDifferenceFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElementalDifferenceFc + outputs : OutputsElementalDifferenceFc """ return super().outputs -#internal name: elemental_difference_fc -#scripting name: elemental_difference_fc class InputsElementalDifferenceFc(_Inputs): - """Intermediate class used to connect user inputs to elemental_difference_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.elemental_difference_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - >>> my_collapse_shell_layers = bool() - >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) + """Intermediate class used to connect user inputs to + elemental_difference_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_difference_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_collapse_shell_layers = bool() + >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) """ + def __init__(self, op: Operator): super().__init__(elemental_difference_fc._spec().inputs, op) - self._fields_container = Input(elemental_difference_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + elemental_difference_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._mesh = Input(elemental_difference_fc._spec().input_pin(1), 1, op, -1) + self._mesh = Input(elemental_difference_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh) - self._scoping = Input(elemental_difference_fc._spec().input_pin(3), 3, op, -1) + self._scoping = Input(elemental_difference_fc._spec().input_pin(3), 3, op, -1) self._inputs.append(self._scoping) - self._collapse_shell_layers = Input(elemental_difference_fc._spec().input_pin(10), 10, op, -1) + self._collapse_shell_layers = Input( + elemental_difference_fc._spec().input_pin(10), 10, op, -1 + ) self._inputs.append(self._collapse_shell_layers) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_difference_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: the mesh region in this pin is used to perform the averaging, if there is no field's support it is used + The mesh region in this pin is used to + perform the averaging, if there is no + field's support it is used Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_difference_fc() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def scoping(self): - """Allows to connect scoping input to the operator + """Allows to connect scoping input to the operator. 
- - pindoc: average only on these elements, if it is scoping container, the label must correspond to the one of the fields container + Average only on these elements, if it is + scoping container, the label must + correspond to the one of the fields + container Parameters ---------- - my_scoping : Scoping, ScopingsContainer, + my_scoping : Scoping or ScopingsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_difference_fc() >>> op.inputs.scoping.connect(my_scoping) - >>> #or + >>> # or >>> op.inputs.scoping(my_scoping) - """ return self._scoping @property def collapse_shell_layers(self): - """Allows to connect collapse_shell_layers input to the operator + """Allows to connect collapse_shell_layers input to the operator. - - pindoc: the max elemental difference is taken through the different shell layers if true (default is false) + The max elemental difference is taken through + the different shell layers if true + (default is false) Parameters ---------- - my_collapse_shell_layers : bool, + my_collapse_shell_layers : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_difference_fc() >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) - >>> #or + >>> # or >>> op.inputs.collapse_shell_layers(my_collapse_shell_layers) - """ return self._collapse_shell_layers + class OutputsElementalDifferenceFc(_Outputs): - """Intermediate class used to get outputs from elemental_difference_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.elemental_difference_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + elemental_difference_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_difference_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(elemental_difference_fc._spec().outputs, op) - self._fields_container = Output(elemental_difference_fc._spec().output_pin(0), 0, op) + self._fields_container = Output( + elemental_difference_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_difference_fc() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/averaging/elemental_fraction_fc.py b/ansys/dpf/core/operators/averaging/elemental_fraction_fc.py index be31f4f3b95..e1c0c42359b 100644 --- a/ansys/dpf/core/operators/averaging/elemental_fraction_fc.py +++ b/ansys/dpf/core/operators/averaging/elemental_fraction_fc.py @@ -1,84 +1,175 @@ """ elemental_fraction_fc -===================== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "averaging" category -""" class elemental_fraction_fc(Operator): - """Transform ElementalNodal fields into Elemental fields. Each elemental value is the fraction between the elemental difference and the entity average. Result is computed on a given elements scoping. - - available inputs: - - fields_container (FieldsContainer) - - mesh (MeshedRegion) (optional) - - scoping (Scoping) (optional) - - denominator (FieldsContainer) (optional) - - collapse_shell_layers (bool) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.averaging.elemental_fraction_fc() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - >>> my_denominator = dpf.FieldsContainer() - >>> op.inputs.denominator.connect(my_denominator) - >>> my_collapse_shell_layers = bool() - >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.averaging.elemental_fraction_fc(fields_container=my_fields_container,mesh=my_mesh,scoping=my_scoping,denominator=my_denominator,collapse_shell_layers=my_collapse_shell_layers) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, 
fields_container=None, mesh=None, scoping=None, denominator=None, collapse_shell_layers=None, config=None, server=None): - super().__init__(name="elemental_fraction_fc", config = config, server = server) + """Transform ElementalNodal fields into Elemental fields. Each elemental + value is the fraction between the elemental difference and the + entity average. Result is computed on a given elements scoping. + + Parameters + ---------- + fields_container : FieldsContainer + mesh : MeshedRegion, optional + The mesh region in this pin is used to + perform the averaging, if there is no + field's support it is used + scoping : Scoping, optional + Average only on these elements, if it is + scoping container, the label must + correspond to the one of the fields + container + denominator : FieldsContainer, optional + If a fields container is set in this pin, it + is used as the denominator of the + fraction instead of entity_average_fc + collapse_shell_layers : bool, optional + The elemental difference and the entity + average are taken through the + different shell layers if true + (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.averaging.elemental_fraction_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_denominator = dpf.FieldsContainer() + >>> op.inputs.denominator.connect(my_denominator) + >>> my_collapse_shell_layers = bool() + >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.averaging.elemental_fraction_fc( + ... fields_container=my_fields_container, + ... mesh=my_mesh, + ... scoping=my_scoping, + ... 
denominator=my_denominator, + ... collapse_shell_layers=my_collapse_shell_layers, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + fields_container=None, + mesh=None, + scoping=None, + denominator=None, + collapse_shell_layers=None, + config=None, + server=None, + ): + super().__init__(name="elemental_fraction_fc", config=config, server=server) self._inputs = InputsElementalFractionFc(self) self._outputs = OutputsElementalFractionFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) - if scoping !=None: + if scoping is not None: self.inputs.scoping.connect(scoping) - if denominator !=None: + if denominator is not None: self.inputs.denominator.connect(denominator) - if collapse_shell_layers !=None: + if collapse_shell_layers is not None: self.inputs.collapse_shell_layers.connect(collapse_shell_layers) @staticmethod def _spec(): - spec = Specification(description="""Transform ElementalNodal fields into Elemental fields. Each elemental value is the fraction between the elemental difference and the entity average. 
Result is computed on a given elements scoping.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""the mesh region in this pin is used to perform the averaging, if there is no field's support it is used"""), - 3 : PinSpecification(name = "scoping", type_names=["scoping"], optional=True, document="""average only on these elements, if it is scoping container, the label must correspond to the one of the fields container"""), - 6 : PinSpecification(name = "denominator", type_names=["fields_container"], optional=True, document="""if a fields container is set in this pin, it is used as the denominator of the fraction instead of entity_average_fc"""), - 10 : PinSpecification(name = "collapse_shell_layers", type_names=["bool"], optional=True, document="""the elemental difference and the entity average are taken through the different shell layers if true (default is false)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Transform ElementalNodal fields into Elemental fields. Each elemental + value is the fraction between the elemental difference and + the entity average. 
Result is computed on a given elements + scoping.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""The mesh region in this pin is used to + perform the averaging, if there is no + field's support it is used""", + ), + 3: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""Average only on these elements, if it is + scoping container, the label must + correspond to the one of the fields + container""", + ), + 6: PinSpecification( + name="denominator", + type_names=["fields_container"], + optional=True, + document="""If a fields container is set in this pin, it + is used as the denominator of the + fraction instead of entity_average_fc""", + ), + 10: PinSpecification( + name="collapse_shell_layers", + type_names=["bool"], + optional=True, + document="""The elemental difference and the entity + average are taken through the + different shell layers if true + (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "elemental_fraction_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="elemental_fraction_fc", server=server) @property def inputs(self): @@ -86,195 +177,199 @@ def inputs(self): Returns -------- - inputs : InputsElementalFractionFc + inputs : InputsElementalFractionFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElementalFractionFc + outputs : OutputsElementalFractionFc """ return super().outputs -#internal name: elemental_fraction_fc -#scripting name: elemental_fraction_fc class InputsElementalFractionFc(_Inputs): - """Intermediate class used to connect user inputs to elemental_fraction_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.elemental_fraction_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - >>> my_denominator = dpf.FieldsContainer() - >>> op.inputs.denominator.connect(my_denominator) - >>> my_collapse_shell_layers = bool() - >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) + """Intermediate class used to connect user inputs to + elemental_fraction_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_fraction_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_denominator = dpf.FieldsContainer() + >>> op.inputs.denominator.connect(my_denominator) + >>> my_collapse_shell_layers = bool() + >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) """ + def __init__(self, op: Operator): super().__init__(elemental_fraction_fc._spec().inputs, op) - self._fields_container = Input(elemental_fraction_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + elemental_fraction_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._mesh = Input(elemental_fraction_fc._spec().input_pin(1), 1, op, -1) + self._mesh = Input(elemental_fraction_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh) - self._scoping = Input(elemental_fraction_fc._spec().input_pin(3), 3, op, -1) + self._scoping = Input(elemental_fraction_fc._spec().input_pin(3), 3, op, -1) self._inputs.append(self._scoping) - self._denominator = Input(elemental_fraction_fc._spec().input_pin(6), 6, op, -1) + self._denominator = Input(elemental_fraction_fc._spec().input_pin(6), 6, op, -1) self._inputs.append(self._denominator) - self._collapse_shell_layers = Input(elemental_fraction_fc._spec().input_pin(10), 10, op, -1) + self._collapse_shell_layers = Input( + elemental_fraction_fc._spec().input_pin(10), 10, op, -1 + ) self._inputs.append(self._collapse_shell_layers) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_fraction_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: the mesh region in this pin is used to perform the averaging, if there is no field's support it is used + The mesh region in this pin is used to + perform the averaging, if there is no + field's support it is used Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_fraction_fc() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def scoping(self): - """Allows to connect scoping input to the operator + """Allows to connect scoping input to the operator. - - pindoc: average only on these elements, if it is scoping container, the label must correspond to the one of the fields container + Average only on these elements, if it is + scoping container, the label must + correspond to the one of the fields + container Parameters ---------- - my_scoping : Scoping, + my_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_fraction_fc() >>> op.inputs.scoping.connect(my_scoping) - >>> #or + >>> # or >>> op.inputs.scoping(my_scoping) - """ return self._scoping @property def denominator(self): - """Allows to connect denominator input to the operator + """Allows to connect denominator input to the operator. 
- - pindoc: if a fields container is set in this pin, it is used as the denominator of the fraction instead of entity_average_fc + If a fields container is set in this pin, it + is used as the denominator of the + fraction instead of entity_average_fc Parameters ---------- - my_denominator : FieldsContainer, + my_denominator : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_fraction_fc() >>> op.inputs.denominator.connect(my_denominator) - >>> #or + >>> # or >>> op.inputs.denominator(my_denominator) - """ return self._denominator @property def collapse_shell_layers(self): - """Allows to connect collapse_shell_layers input to the operator + """Allows to connect collapse_shell_layers input to the operator. - - pindoc: the elemental difference and the entity average are taken through the different shell layers if true (default is false) + The elemental difference and the entity + average are taken through the + different shell layers if true + (default is false) Parameters ---------- - my_collapse_shell_layers : bool, + my_collapse_shell_layers : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_fraction_fc() >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) - >>> #or + >>> # or >>> op.inputs.collapse_shell_layers(my_collapse_shell_layers) - """ return self._collapse_shell_layers + class OutputsElementalFractionFc(_Outputs): - """Intermediate class used to get outputs from elemental_fraction_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.elemental_fraction_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + elemental_fraction_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_fraction_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(elemental_fraction_fc._spec().outputs, op) - self._fields_container = Output(elemental_fraction_fc._spec().output_pin(0), 0, op) + self._fields_container = Output( + elemental_fraction_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_fraction_fc() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/averaging/elemental_mean.py b/ansys/dpf/core/operators/averaging/elemental_mean.py index f1b6e486bfc..26629416619 100644 --- a/ansys/dpf/core/operators/averaging/elemental_mean.py +++ b/ansys/dpf/core/operators/averaging/elemental_mean.py @@ -1,78 +1,146 @@ """ elemental_mean -============== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "averaging" category -""" class elemental_mean(Operator): - """Computes the average of a multi-entity fields, (ElementalNodal -> Elemental), (NodalElemental -> Nodal). - - available inputs: - - field (Field) - - collapse_shell_layers (bool) (optional) - - force_averaging (bool) (optional) - - scoping (Scoping) (optional) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.averaging.elemental_mean() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_collapse_shell_layers = bool() - >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) - >>> my_force_averaging = bool() - >>> op.inputs.force_averaging.connect(my_force_averaging) - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.averaging.elemental_mean(field=my_field,collapse_shell_layers=my_collapse_shell_layers,force_averaging=my_force_averaging,scoping=my_scoping) - - >>> # Get output data - >>> result_field = op.outputs.field()""" - def __init__(self, field=None, collapse_shell_layers=None, force_averaging=None, scoping=None, config=None, server=None): - super().__init__(name="entity_average", config = config, server = server) + """Computes the average of a multi-entity fields, (ElementalNodal -> + Elemental), (NodalElemental -> Nodal). 
+ + Parameters + ---------- + field : Field + collapse_shell_layers : bool, optional + If true shell layers are averaged as well + (default is false) + force_averaging : bool, optional + If true you average, if false you just sum + scoping : Scoping, optional + Average only on these elements, if it is + scoping container, the label must + correspond to the one of the fields + container + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.averaging.elemental_mean() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_collapse_shell_layers = bool() + >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) + >>> my_force_averaging = bool() + >>> op.inputs.force_averaging.connect(my_force_averaging) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.averaging.elemental_mean( + ... field=my_field, + ... collapse_shell_layers=my_collapse_shell_layers, + ... force_averaging=my_force_averaging, + ... scoping=my_scoping, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__( + self, + field=None, + collapse_shell_layers=None, + force_averaging=None, + scoping=None, + config=None, + server=None, + ): + super().__init__(name="entity_average", config=config, server=server) self._inputs = InputsElementalMean(self) self._outputs = OutputsElementalMean(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if collapse_shell_layers !=None: + if collapse_shell_layers is not None: self.inputs.collapse_shell_layers.connect(collapse_shell_layers) - if force_averaging !=None: + if force_averaging is not None: self.inputs.force_averaging.connect(force_averaging) - if scoping !=None: + if scoping is not None: self.inputs.scoping.connect(scoping) @staticmethod def _spec(): - spec = Specification(description="""Computes the average of a multi-entity fields, (ElementalNodal -> Elemental), (NodalElemental -> Nodal).""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document=""""""), - 1 : PinSpecification(name = "collapse_shell_layers", type_names=["bool"], optional=True, document="""if true shell layers are averaged as well (default is false)"""), - 2 : PinSpecification(name = "force_averaging", type_names=["bool"], optional=True, document="""if true you average, if false you just sum"""), - 3 : PinSpecification(name = "scoping", type_names=["scoping"], optional=True, document="""average only on these elements, if it is scoping container, the label must correspond to the one of the fields container""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Computes the average of a multi-entity fields, (ElementalNodal -> + Elemental), (NodalElemental -> Nodal).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + 
type_names=["field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="collapse_shell_layers", + type_names=["bool"], + optional=True, + document="""If true shell layers are averaged as well + (default is false)""", + ), + 2: PinSpecification( + name="force_averaging", + type_names=["bool"], + optional=True, + document="""If true you average, if false you just sum""", + ), + 3: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""Average only on these elements, if it is + scoping container, the label must + correspond to the one of the fields + container""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "entity_average") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the global server.
+ """ + return Operator.default_config(name="entity_average", server=server) @property def inputs(self): @@ -80,169 +148,165 @@ def inputs(self): Returns -------- - inputs : InputsElementalMean + inputs : InputsElementalMean """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElementalMean + outputs : OutputsElementalMean """ return super().outputs -#internal name: entity_average -#scripting name: elemental_mean class InputsElementalMean(_Inputs): - """Intermediate class used to connect user inputs to elemental_mean operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.elemental_mean() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_collapse_shell_layers = bool() - >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) - >>> my_force_averaging = bool() - >>> op.inputs.force_averaging.connect(my_force_averaging) - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) + """Intermediate class used to connect user inputs to + elemental_mean operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_mean() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_collapse_shell_layers = bool() + >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) + >>> my_force_averaging = bool() + >>> op.inputs.force_averaging.connect(my_force_averaging) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) """ + def __init__(self, op: Operator): super().__init__(elemental_mean._spec().inputs, op) - self._field = Input(elemental_mean._spec().input_pin(0), 0, op, -1) + self._field = Input(elemental_mean._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._collapse_shell_layers = Input(elemental_mean._spec().input_pin(1), 1, op, -1) + self._collapse_shell_layers = Input( + elemental_mean._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._collapse_shell_layers) - self._force_averaging = Input(elemental_mean._spec().input_pin(2), 2, op, -1) + self._force_averaging = Input(elemental_mean._spec().input_pin(2), 2, op, -1) self._inputs.append(self._force_averaging) - self._scoping = Input(elemental_mean._spec().input_pin(3), 3, op, -1) + self._scoping = Input(elemental_mean._spec().input_pin(3), 3, op, -1) self._inputs.append(self._scoping) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. Parameters ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_mean() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def collapse_shell_layers(self): - """Allows to connect collapse_shell_layers input to the operator + """Allows to connect collapse_shell_layers input to the operator. 
- - pindoc: if true shell layers are averaged as well (default is false) + If true shell layers are averaged as well + (default is false) Parameters ---------- - my_collapse_shell_layers : bool, + my_collapse_shell_layers : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_mean() >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) - >>> #or + >>> # or >>> op.inputs.collapse_shell_layers(my_collapse_shell_layers) - """ return self._collapse_shell_layers @property def force_averaging(self): - """Allows to connect force_averaging input to the operator + """Allows to connect force_averaging input to the operator. - - pindoc: if true you average, if false you just sum + If true you average, if false you just sum Parameters ---------- - my_force_averaging : bool, + my_force_averaging : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_mean() >>> op.inputs.force_averaging.connect(my_force_averaging) - >>> #or + >>> # or >>> op.inputs.force_averaging(my_force_averaging) - """ return self._force_averaging @property def scoping(self): - """Allows to connect scoping input to the operator + """Allows to connect scoping input to the operator. 
- - pindoc: average only on these elements, if it is scoping container, the label must correspond to the one of the fields container + Average only on these elements, if it is + scoping container, the label must + correspond to the one of the fields + container Parameters ---------- - my_scoping : Scoping, + my_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_mean() >>> op.inputs.scoping.connect(my_scoping) - >>> #or + >>> # or >>> op.inputs.scoping(my_scoping) - """ return self._scoping + class OutputsElementalMean(_Outputs): - """Intermediate class used to get outputs from elemental_mean operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.elemental_mean() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + elemental_mean operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_mean() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(elemental_mean._spec().outputs, op) - self._field = Output(elemental_mean._spec().output_pin(0), 0, op) + self._field = Output(elemental_mean._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_mean() >>> # Connect inputs : op.inputs. ... 
- >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/averaging/elemental_mean_fc.py b/ansys/dpf/core/operators/averaging/elemental_mean_fc.py index d5a3ee266d1..049ba1e662e 100644 --- a/ansys/dpf/core/operators/averaging/elemental_mean_fc.py +++ b/ansys/dpf/core/operators/averaging/elemental_mean_fc.py @@ -1,84 +1,171 @@ """ elemental_mean_fc -================= +================= +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "averaging" category -""" class elemental_mean_fc(Operator): - """Computes the average of a multi-entity container of fields, (ElementalNodal -> Elemental), (NodalElemental -> Nodal). If the input fields are mixed shell/solid and collapseShellLayers is not asked, then the fields are splitted by element shape and the output fields container has elshape label.
- - available inputs: - - fields_container (FieldsContainer) - - collapse_shell_layers (bool) (optional) - - force_averaging (bool) (optional) - - scoping (Scoping) (optional) - - meshed_region (MeshedRegion) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.averaging.elemental_mean_fc() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_collapse_shell_layers = bool() - >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) - >>> my_force_averaging = bool() - >>> op.inputs.force_averaging.connect(my_force_averaging) - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - >>> my_meshed_region = dpf.MeshedRegion() - >>> op.inputs.meshed_region.connect(my_meshed_region) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.averaging.elemental_mean_fc(fields_container=my_fields_container,collapse_shell_layers=my_collapse_shell_layers,force_averaging=my_force_averaging,scoping=my_scoping,meshed_region=my_meshed_region) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, collapse_shell_layers=None, force_averaging=None, scoping=None, meshed_region=None, config=None, server=None): - super().__init__(name="entity_average_fc", config = config, server = server) + """Computes the average of a multi-entity container of fields, + (ElementalNodal -> Elemental), (NodalElemental -> Nodal). If the + input fields are mixed shell/solid and collapseShellLayers is not + asked, then the fields are split by element shape and the output + fields container has elshape label. 
+ + Parameters + ---------- + fields_container : FieldsContainer + collapse_shell_layers : bool, optional + If true shell layers are averaged as well + (default is false) + force_averaging : bool, optional + If true you average, if false you just sum + scoping : Scoping, optional + Average only on these elements, if it is + scoping container, the label must + correspond to the one of the fields + container + abstract_meshed_region : MeshedRegion, optional + The mesh region in this pin is used to + perform the averaging, if there is no + field's support it is used + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.averaging.elemental_mean_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_collapse_shell_layers = bool() + >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) + >>> my_force_averaging = bool() + >>> op.inputs.force_averaging.connect(my_force_averaging) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.averaging.elemental_mean_fc( + ... fields_container=my_fields_container, + ... collapse_shell_layers=my_collapse_shell_layers, + ... force_averaging=my_force_averaging, + ... scoping=my_scoping, + ... abstract_meshed_region=my_abstract_meshed_region, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + fields_container=None, + collapse_shell_layers=None, + force_averaging=None, + scoping=None, + abstract_meshed_region=None, + config=None, + server=None, + ): + super().__init__(name="entity_average_fc", config=config, server=server) self._inputs = InputsElementalMeanFc(self) self._outputs = OutputsElementalMeanFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if collapse_shell_layers !=None: + if collapse_shell_layers is not None: self.inputs.collapse_shell_layers.connect(collapse_shell_layers) - if force_averaging !=None: + if force_averaging is not None: self.inputs.force_averaging.connect(force_averaging) - if scoping !=None: + if scoping is not None: self.inputs.scoping.connect(scoping) - if meshed_region !=None: - self.inputs.meshed_region.connect(meshed_region) + if abstract_meshed_region is not None: + self.inputs.abstract_meshed_region.connect(abstract_meshed_region) @staticmethod def _spec(): - spec = Specification(description="""Computes the average of a multi-entity container of fields, (ElementalNodal -> Elemental), (NodalElemental -> Nodal). 
If the input fields are mixed shell/solid and collapseShellLayers is not asked, then the fields are splitted by element shape and the output fields container has elshape label.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "collapse_shell_layers", type_names=["bool"], optional=True, document="""if true shell layers are averaged as well (default is false)"""), - 2 : PinSpecification(name = "force_averaging", type_names=["bool"], optional=True, document="""if true you average, if false you just sum"""), - 3 : PinSpecification(name = "scoping", type_names=["scoping"], optional=True, document="""average only on these elements, if it is scoping container, the label must correspond to the one of the fields container"""), - 4 : PinSpecification(name = "meshed_region", type_names=["abstract_meshed_region"], optional=True, document="""the mesh region in this pin is used to perform the averaging, if there is no field's support it is used""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Computes the average of a multi-entity container of fields, + (ElementalNodal -> Elemental), (NodalElemental -> Nodal). 
+ If the input fields are mixed shell/solid and + collapseShellLayers is not asked, then the fields are + split by element shape and the output fields container has + elshape label.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="collapse_shell_layers", + type_names=["bool"], + optional=True, + document="""If true shell layers are averaged as well + (default is false)""", + ), + 2: PinSpecification( + name="force_averaging", + type_names=["bool"], + optional=True, + document="""If true you average, if false you just sum""", + ), + 3: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""Average only on these elements, if it is + scoping container, the label must + correspond to the one of the fields + container""", + ), + 4: PinSpecification( + name="abstract_meshed_region", + type_names=["abstract_meshed_region"], + optional=True, + document="""The mesh region in this pin is used to + perform the averaging, if there is no + field's support it is used""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "entity_average_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the global server.
+ """ + return Operator.default_config(name="entity_average_fc", server=server) @property def inputs(self): @@ -86,195 +173,195 @@ def inputs(self): Returns -------- - inputs : InputsElementalMeanFc + inputs : InputsElementalMeanFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElementalMeanFc + outputs : OutputsElementalMeanFc """ return super().outputs -#internal name: entity_average_fc -#scripting name: elemental_mean_fc class InputsElementalMeanFc(_Inputs): - """Intermediate class used to connect user inputs to elemental_mean_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.elemental_mean_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_collapse_shell_layers = bool() - >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) - >>> my_force_averaging = bool() - >>> op.inputs.force_averaging.connect(my_force_averaging) - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - >>> my_meshed_region = dpf.MeshedRegion() - >>> op.inputs.meshed_region.connect(my_meshed_region) + """Intermediate class used to connect user inputs to + elemental_mean_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_mean_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_collapse_shell_layers = bool() + >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) + >>> my_force_averaging = bool() + >>> op.inputs.force_averaging.connect(my_force_averaging) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) """ + def __init__(self, op: Operator): super().__init__(elemental_mean_fc._spec().inputs, op) - self._fields_container = Input(elemental_mean_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + elemental_mean_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._collapse_shell_layers = Input(elemental_mean_fc._spec().input_pin(1), 1, op, -1) + self._collapse_shell_layers = Input( + elemental_mean_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._collapse_shell_layers) - self._force_averaging = Input(elemental_mean_fc._spec().input_pin(2), 2, op, -1) + self._force_averaging = Input(elemental_mean_fc._spec().input_pin(2), 2, op, -1) self._inputs.append(self._force_averaging) - self._scoping = Input(elemental_mean_fc._spec().input_pin(3), 3, op, -1) + self._scoping = Input(elemental_mean_fc._spec().input_pin(3), 3, op, -1) self._inputs.append(self._scoping) - self._meshed_region = Input(elemental_mean_fc._spec().input_pin(4), 4, op, -1) - self._inputs.append(self._meshed_region) + self._abstract_meshed_region = Input( + elemental_mean_fc._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._abstract_meshed_region) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input 
to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_mean_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def collapse_shell_layers(self): - """Allows to connect collapse_shell_layers input to the operator + """Allows to connect collapse_shell_layers input to the operator. - - pindoc: if true shell layers are averaged as well (default is false) + If true shell layers are averaged as well + (default is false) Parameters ---------- - my_collapse_shell_layers : bool, + my_collapse_shell_layers : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_mean_fc() >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) - >>> #or + >>> # or >>> op.inputs.collapse_shell_layers(my_collapse_shell_layers) - """ return self._collapse_shell_layers @property def force_averaging(self): - """Allows to connect force_averaging input to the operator + """Allows to connect force_averaging input to the operator. - - pindoc: if true you average, if false you just sum + If true you average, if false you just sum Parameters ---------- - my_force_averaging : bool, + my_force_averaging : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_mean_fc() >>> op.inputs.force_averaging.connect(my_force_averaging) - >>> #or + >>> # or >>> op.inputs.force_averaging(my_force_averaging) - """ return self._force_averaging @property def scoping(self): - """Allows to connect scoping input to the operator + """Allows to connect scoping input to the operator. 
- - pindoc: average only on these elements, if it is scoping container, the label must correspond to the one of the fields container + Average only on these elements, if it is + scoping container, the label must + correspond to the one of the fields + container Parameters ---------- - my_scoping : Scoping, + my_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_mean_fc() >>> op.inputs.scoping.connect(my_scoping) - >>> #or + >>> # or >>> op.inputs.scoping(my_scoping) - """ return self._scoping @property - def meshed_region(self): - """Allows to connect meshed_region input to the operator + def abstract_meshed_region(self): + """Allows to connect abstract_meshed_region input to the operator. - - pindoc: the mesh region in this pin is used to perform the averaging, if there is no field's support it is used + The mesh region in this pin is used to + perform the averaging, if there is no + field's support it is used Parameters ---------- - my_meshed_region : MeshedRegion, + my_abstract_meshed_region : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_mean_fc() - >>> op.inputs.meshed_region.connect(my_meshed_region) - >>> #or - >>> op.inputs.meshed_region(my_meshed_region) - + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> # or + >>> op.inputs.abstract_meshed_region(my_abstract_meshed_region) """ - return self._meshed_region + return self._abstract_meshed_region + class OutputsElementalMeanFc(_Outputs): - """Intermediate class used to get outputs from elemental_mean_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.elemental_mean_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + elemental_mean_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_mean_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(elemental_mean_fc._spec().outputs, op) - self._fields_container = Output(elemental_mean_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(elemental_mean_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_mean_fc() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal.py b/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal.py index f0a28610c54..df3dbcb4a1f 100644 --- a/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal.py +++ b/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal.py @@ -1,78 +1,144 @@ """ elemental_nodal_to_nodal -======================== +======================== +Autogenerated DPF operator classes.
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "averaging" category -""" class elemental_nodal_to_nodal(Operator): - """Transform ElementalNodal field into Nodal field using an averaging process, result is computed on a given node scoping. - - available inputs: - - field (Field, FieldsContainer) - - mesh_scoping (Scoping) (optional) - - should_average (bool) (optional) - - mesh (MeshedRegion) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_should_average = bool() - >>> op.inputs.should_average.connect(my_should_average) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal(field=my_field,mesh_scoping=my_mesh_scoping,should_average=my_should_average,mesh=my_mesh) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, field=None, mesh_scoping=None, should_average=None, mesh=None, config=None, server=None): - super().__init__(name="elemental_nodal_To_nodal", config = config, server = server) + """Transform ElementalNodal field into Nodal field using an averaging + process, result is computed on a given node scoping. 
+ + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + mesh_scoping : Scoping, optional + Average only on these entities + should_average : bool, optional + Each nodal value is divided by the number of + elements linked to this node (default + is true for discrete quantities) + mesh : MeshedRegion, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.averaging.elemental_nodal_to_nodal() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_should_average = bool() + >>> op.inputs.should_average.connect(my_should_average) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.averaging.elemental_nodal_to_nodal( + ... field=my_field, + ... mesh_scoping=my_mesh_scoping, + ... should_average=my_should_average, + ... mesh=my_mesh, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__( + self, + field=None, + mesh_scoping=None, + should_average=None, + mesh=None, + config=None, + server=None, + ): + super().__init__(name="elemental_nodal_To_nodal", config=config, server=server) self._inputs = InputsElementalNodalToNodal(self) self._outputs = OutputsElementalNodalToNodal(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if should_average !=None: + if should_average is not None: self.inputs.should_average.connect(should_average) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Transform ElementalNodal field into Nodal field using an averaging process, result is computed on a given node scoping.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scoping"], optional=True, document="""average only on these entities"""), - 2 : PinSpecification(name = "should_average", type_names=["bool"], optional=True, document="""each nodal value is divided by the number of elements linked to this node (default is true for discrete quantities)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Transform ElementalNodal field into Nodal field using an averaging + process, result is computed on a given node scoping.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + 
type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=True, + document="""Average only on these entities""", + ), + 2: PinSpecification( + name="should_average", + type_names=["bool"], + optional=True, + document="""Each nodal value is divided by the number of + elements linked to this node (default + is true for discrete quantities)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "elemental_nodal_To_nodal") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="elemental_nodal_To_nodal", server=server) @property def inputs(self): @@ -80,169 +146,166 @@ def inputs(self): Returns -------- - inputs : InputsElementalNodalToNodal + inputs : InputsElementalNodalToNodal """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElementalNodalToNodal + outputs : OutputsElementalNodalToNodal """ return super().outputs -#internal name: elemental_nodal_To_nodal -#scripting name: elemental_nodal_to_nodal class InputsElementalNodalToNodal(_Inputs): - """Intermediate class used to connect user inputs to elemental_nodal_to_nodal operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_should_average = bool() - >>> op.inputs.should_average.connect(my_should_average) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + elemental_nodal_to_nodal operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_nodal_to_nodal() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_should_average = bool() + >>> op.inputs.should_average.connect(my_should_average) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(elemental_nodal_to_nodal._spec().inputs, op) - self._field = Input(elemental_nodal_to_nodal._spec().input_pin(0), 0, op, -1) + self._field = Input(elemental_nodal_to_nodal._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._mesh_scoping = Input(elemental_nodal_to_nodal._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + elemental_nodal_to_nodal._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._should_average = Input(elemental_nodal_to_nodal._spec().input_pin(2), 2, op, -1) + self._should_average = Input( + elemental_nodal_to_nodal._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._should_average) - self._mesh = Input(elemental_nodal_to_nodal._spec().input_pin(7), 7, op, -1) + self._mesh = Input(elemental_nodal_to_nodal._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: average only on these entities + Average only on these entities Parameters ---------- - my_mesh_scoping : Scoping, + my_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def should_average(self): - """Allows to connect should_average input to the operator + """Allows to connect should_average input to the operator. - - pindoc: each nodal value is divided by the number of elements linked to this node (default is true for discrete quantities) + Each nodal value is divided by the number of + elements linked to this node (default + is true for discrete quantities) Parameters ---------- - my_should_average : bool, + my_should_average : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal() >>> op.inputs.should_average.connect(my_should_average) - >>> #or + >>> # or >>> op.inputs.should_average(my_should_average) - """ return self._should_average @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsElementalNodalToNodal(_Outputs): - """Intermediate class used to get outputs from elemental_nodal_to_nodal operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + elemental_nodal_to_nodal operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_nodal_to_nodal() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(elemental_nodal_to_nodal._spec().outputs, op) - self._fields_container = Output(elemental_nodal_to_nodal._spec().output_pin(0), 0, op) - self._outputs.append(self._fields_container) + self._field = Output(elemental_nodal_to_nodal._spec().output_pin(0), 0, op) + self._outputs.append(self._field) @property - def fields_container(self): - """Allows to get fields_container output of the operator - + def field(self): + """Allows to get field output of the operator Returns ---------- - my_fields_container : FieldsContainer, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ - return self._fields_container - + >>> result_field = op.outputs.field() + """ # noqa: E501 + return self._field diff --git a/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_elemental.py b/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_elemental.py index 953f22949bf..0f9e9781a3d 100644 --- a/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_elemental.py +++ b/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_elemental.py @@ -1,66 +1,110 @@ """ elemental_nodal_to_nodal_elemental -================================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "averaging" category -""" class elemental_nodal_to_nodal_elemental(Operator): - """Transform ElementalNodal field to NodalElemental, compute result on a given node scoping. 
- - available inputs: - - field (Field, FieldsContainer) - - mesh_scoping (Scoping) (optional) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental(field=my_field,mesh_scoping=my_mesh_scoping) + """Transform ElementalNodal field to NodalElemental, compute result on a + given node scoping. + + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + mesh_scoping : Scoping, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental( + ... field=my_field, + ... mesh_scoping=my_mesh_scoping, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, field=None, mesh_scoping=None, config=None, server=None): - super().__init__(name="ElementalNodal_To_NodalElemental", config = config, server = server) + super().__init__( + name="ElementalNodal_To_NodalElemental", config=config, server=server + ) self._inputs = InputsElementalNodalToNodalElemental(self) self._outputs = OutputsElementalNodalToNodalElemental(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) @staticmethod def _spec(): - spec = Specification(description="""Transform ElementalNodal field to NodalElemental, compute result on a given node scoping.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scoping"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Transform ElementalNodal field to NodalElemental, compute result on a + given node scoping.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return 
Operator.default_config(name = "ElementalNodal_To_NodalElemental") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config( + name="ElementalNodal_To_NodalElemental", server=server + ) @property def inputs(self): @@ -68,117 +112,118 @@ def inputs(self): Returns -------- - inputs : InputsElementalNodalToNodalElemental + inputs : InputsElementalNodalToNodalElemental """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElementalNodalToNodalElemental + outputs : OutputsElementalNodalToNodalElemental """ return super().outputs -#internal name: ElementalNodal_To_NodalElemental -#scripting name: elemental_nodal_to_nodal_elemental class InputsElementalNodalToNodalElemental(_Inputs): - """Intermediate class used to connect user inputs to elemental_nodal_to_nodal_elemental operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + """Intermediate class used to connect user inputs to + elemental_nodal_to_nodal_elemental operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) """ + def __init__(self, op: Operator): super().__init__(elemental_nodal_to_nodal_elemental._spec().inputs, op) - self._field = Input(elemental_nodal_to_nodal_elemental._spec().input_pin(0), 0, op, -1) + self._field = Input( + elemental_nodal_to_nodal_elemental._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field) - self._mesh_scoping = Input(elemental_nodal_to_nodal_elemental._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + elemental_nodal_to_nodal_elemental._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. 
Parameters ---------- - my_mesh_scoping : Scoping, + my_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping + class OutputsElementalNodalToNodalElemental(_Outputs): - """Intermediate class used to get outputs from elemental_nodal_to_nodal_elemental operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + elemental_nodal_to_nodal_elemental operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(elemental_nodal_to_nodal_elemental._spec().outputs, op) - self._field = Output(elemental_nodal_to_nodal_elemental._spec().output_pin(0), 0, op) + self._field = Output( + elemental_nodal_to_nodal_elemental._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental() >>> # Connect inputs : op.inputs. ... 
- >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_elemental_fc.py b/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_elemental_fc.py index 81bd48db9d8..79666219d17 100644 --- a/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_elemental_fc.py +++ b/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_elemental_fc.py @@ -1,66 +1,109 @@ """ elemental_nodal_to_nodal_elemental_fc -===================================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "averaging" category -""" class elemental_nodal_to_nodal_elemental_fc(Operator): - """Transform ElementalNodal fields to NodalElemental fields, compute result on a given node scoping. + """Transform ElementalNodal fields to NodalElemental fields, compute + result on a given node scoping. 
+ + Parameters + ---------- + fields_container : FieldsContainer + mesh_scoping : Scoping, optional - available inputs: - - fields_container (FieldsContainer) - - mesh_scoping (Scoping) (optional) - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental_fc() - >>> # Instantiate operator - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental_fc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental_fc( + ... fields_container=my_fields_container, + ... mesh_scoping=my_mesh_scoping, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental_fc(fields_container=my_fields_container,mesh_scoping=my_mesh_scoping) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, mesh_scoping=None, config=None, server=None): - super().__init__(name="ElementalNodal_To_NodalElemental_fc", config = config, server = server) + def __init__( + self, fields_container=None, mesh_scoping=None, config=None, server=None + ): + super().__init__( + name="ElementalNodal_To_NodalElemental_fc", config=config, server=server + ) self._inputs = InputsElementalNodalToNodalElementalFc(self) self._outputs = OutputsElementalNodalToNodalElementalFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) @staticmethod def _spec(): - spec = Specification(description="""Transform ElementalNodal fields to NodalElemental fields, compute result on a given node scoping.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scoping"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Transform ElementalNodal fields to NodalElemental fields, compute + result on a given node scoping.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: 
PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ElementalNodal_To_NodalElemental_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config( + name="ElementalNodal_To_NodalElemental_fc", server=server + ) @property def inputs(self): @@ -68,115 +111,115 @@ def inputs(self): Returns -------- - inputs : InputsElementalNodalToNodalElementalFc + inputs : InputsElementalNodalToNodalElementalFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElementalNodalToNodalElementalFc + outputs : OutputsElementalNodalToNodalElementalFc """ return super().outputs -#internal name: ElementalNodal_To_NodalElemental_fc -#scripting name: elemental_nodal_to_nodal_elemental_fc class InputsElementalNodalToNodalElementalFc(_Inputs): - """Intermediate class used to connect user inputs to elemental_nodal_to_nodal_elemental_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh_scoping = dpf.Scoping() - >>> 
op.inputs.mesh_scoping.connect(my_mesh_scoping) + """Intermediate class used to connect user inputs to + elemental_nodal_to_nodal_elemental_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) """ + def __init__(self, op: Operator): super().__init__(elemental_nodal_to_nodal_elemental_fc._spec().inputs, op) - self._fields_container = Input(elemental_nodal_to_nodal_elemental_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + elemental_nodal_to_nodal_elemental_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._mesh_scoping = Input(elemental_nodal_to_nodal_elemental_fc._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + elemental_nodal_to_nodal_elemental_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. 
Parameters ---------- - my_mesh_scoping : Scoping, + my_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental_fc() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping + class OutputsElementalNodalToNodalElementalFc(_Outputs): - """Intermediate class used to get outputs from elemental_nodal_to_nodal_elemental_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + elemental_nodal_to_nodal_elemental_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(elemental_nodal_to_nodal_elemental_fc._spec().outputs, op) - self._fields_container = Output(elemental_nodal_to_nodal_elemental_fc._spec().output_pin(0), 0, op) + self._fields_container = Output( + elemental_nodal_to_nodal_elemental_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_fc.py b/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_fc.py index 0543c87272a..f46de7120bb 100644 --- a/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_fc.py +++ b/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_fc.py @@ -1,78 +1,159 @@ """ elemental_nodal_to_nodal_fc -=========================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "averaging" category -""" class elemental_nodal_to_nodal_fc(Operator): - """Transform ElementalNodal fields into Nodal fields using an averaging process, result is computed on a given node scoping. If the input fields are mixed shell/solid, then the fields are splitted by element shape and the output fields container has elshape label. 
- - available inputs: - - fields_container (FieldsContainer) - - mesh (MeshedRegion, MeshesContainer) (optional) - - should_average (bool) (optional) - - scoping (Scoping, ScopingsContainer) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_fc() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_should_average = bool() - >>> op.inputs.should_average.connect(my_should_average) - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_fc(fields_container=my_fields_container,mesh=my_mesh,should_average=my_should_average,scoping=my_scoping) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, mesh=None, should_average=None, scoping=None, config=None, server=None): - super().__init__(name="elemental_nodal_To_nodal_fc", config = config, server = server) + """Transform ElementalNodal fields into Nodal fields using an averaging + process, result is computed on a given node scoping. If the input + fields are mixed shell/solid, then the fields are split by element + shape and the output fields container has elshape label. 
+ + Parameters + ---------- + fields_container : FieldsContainer + mesh : MeshedRegion or MeshesContainer, optional + The mesh region in this pin is used to + perform the averaging, if there is no + field's support it is used + should_average : bool, optional + Each nodal value is divided by the number of + elements linked to this node (default + is true for discrete quantities) + scoping : Scoping or ScopingsContainer, optional + Average only on these nodes, if it is scoping + container, the label must correspond + to the one of the fields container + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_should_average = bool() + >>> op.inputs.should_average.connect(my_should_average) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_fc( + ... fields_container=my_fields_container, + ... mesh=my_mesh, + ... should_average=my_should_average, + ... scoping=my_scoping, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + fields_container=None, + mesh=None, + should_average=None, + scoping=None, + config=None, + server=None, + ): + super().__init__( + name="elemental_nodal_To_nodal_fc", config=config, server=server + ) self._inputs = InputsElementalNodalToNodalFc(self) self._outputs = OutputsElementalNodalToNodalFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) - if should_average !=None: + if should_average is not None: self.inputs.should_average.connect(should_average) - if scoping !=None: + if scoping is not None: self.inputs.scoping.connect(scoping) @staticmethod def _spec(): - spec = Specification(description="""Transform ElementalNodal fields into Nodal fields using an averaging process, result is computed on a given node scoping. 
If the input fields are mixed shell/solid, then the fields are splitted by element shape and the output fields container has elshape label.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""the mesh region in this pin is used to perform the averaging, if there is no field's support it is used"""), - 2 : PinSpecification(name = "should_average", type_names=["bool"], optional=True, document="""each nodal value is divided by the number of elements linked to this node (default is true for discrete quantities)"""), - 3 : PinSpecification(name = "scoping", type_names=["scoping","scopings_container"], optional=True, document="""average only on these nodes, if it is scoping container, the label must correspond to the one of the fields container""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Transform ElementalNodal fields into Nodal fields using an averaging + process, result is computed on a given node scoping. 
If + the input fields are mixed shell/solid, then the fields + are split by element shape and the output fields container + has elshape label.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""The mesh region in this pin is used to + perform the averaging, if there is no + field's support it is used""", + ), + 2: PinSpecification( + name="should_average", + type_names=["bool"], + optional=True, + document="""Each nodal value is divided by the number of + elements linked to this node (default + is true for discrete quantities)""", + ), + 3: PinSpecification( + name="scoping", + type_names=["scoping", "scopings_container"], + optional=True, + document="""Average only on these nodes, if it is scoping + container, the label must correspond + to the one of the fields container""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "elemental_nodal_To_nodal_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config( + name="elemental_nodal_To_nodal_fc", server=server + ) @property def inputs(self): @@ -80,169 +161,173 @@ def inputs(self): Returns -------- - inputs : InputsElementalNodalToNodalFc + inputs : InputsElementalNodalToNodalFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElementalNodalToNodalFc + outputs : OutputsElementalNodalToNodalFc """ return super().outputs -#internal name: elemental_nodal_To_nodal_fc -#scripting name: elemental_nodal_to_nodal_fc class InputsElementalNodalToNodalFc(_Inputs): - """Intermediate class used to connect user inputs to elemental_nodal_to_nodal_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_should_average = bool() - >>> op.inputs.should_average.connect(my_should_average) - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) + """Intermediate class used to connect user inputs to + elemental_nodal_to_nodal_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_should_average = bool() + >>> op.inputs.should_average.connect(my_should_average) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) """ + def __init__(self, op: Operator): super().__init__(elemental_nodal_to_nodal_fc._spec().inputs, op) - self._fields_container = Input(elemental_nodal_to_nodal_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + elemental_nodal_to_nodal_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._mesh = Input(elemental_nodal_to_nodal_fc._spec().input_pin(1), 1, op, -1) + self._mesh = Input(elemental_nodal_to_nodal_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh) - self._should_average = Input(elemental_nodal_to_nodal_fc._spec().input_pin(2), 2, op, -1) + self._should_average = Input( + elemental_nodal_to_nodal_fc._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._should_average) - self._scoping = Input(elemental_nodal_to_nodal_fc._spec().input_pin(3), 3, op, -1) + self._scoping = Input( + elemental_nodal_to_nodal_fc._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._scoping) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: the mesh region in this pin is used to perform the averaging, if there is no field's support it is used + The mesh region in this pin is used to + perform the averaging, if there is no + field's support it is used Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_fc() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def should_average(self): - """Allows to connect should_average input to the operator + """Allows to connect should_average input to the operator. - - pindoc: each nodal value is divided by the number of elements linked to this node (default is true for discrete quantities) + Each nodal value is divided by the number of + elements linked to this node (default + is true for discrete quantities) Parameters ---------- - my_should_average : bool, + my_should_average : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_fc() >>> op.inputs.should_average.connect(my_should_average) - >>> #or + >>> # or >>> op.inputs.should_average(my_should_average) - """ return self._should_average @property def scoping(self): - """Allows to connect scoping input to the operator + """Allows to connect scoping input to the operator. 
- - pindoc: average only on these nodes, if it is scoping container, the label must correspond to the one of the fields container + Average only on these nodes, if it is scoping + container, the label must correspond + to the one of the fields container Parameters ---------- - my_scoping : Scoping, ScopingsContainer, + my_scoping : Scoping or ScopingsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_fc() >>> op.inputs.scoping.connect(my_scoping) - >>> #or + >>> # or >>> op.inputs.scoping(my_scoping) - """ return self._scoping + class OutputsElementalNodalToNodalFc(_Outputs): - """Intermediate class used to get outputs from elemental_nodal_to_nodal_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + elemental_nodal_to_nodal_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(elemental_nodal_to_nodal_fc._spec().outputs, op) - self._fields_container = Output(elemental_nodal_to_nodal_fc._spec().output_pin(0), 0, op) + self._fields_container = Output( + elemental_nodal_to_nodal_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/averaging/elemental_to_elemental_nodal.py b/ansys/dpf/core/operators/averaging/elemental_to_elemental_nodal.py new file mode 100644 index 00000000000..559ac2c6f8d --- /dev/null +++ b/ansys/dpf/core/operators/averaging/elemental_to_elemental_nodal.py @@ -0,0 +1,264 @@ +""" +elemental_to_elemental_nodal +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class elemental_to_elemental_nodal(Operator): + """Transform Elemental field to Elemental Nodal field. + + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + mesh_scoping : Scoping, optional + Average only on these entities + mesh : MeshedRegion, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.averaging.elemental_to_elemental_nodal() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.averaging.elemental_to_elemental_nodal( + ... field=my_field, + ... mesh_scoping=my_mesh_scoping, + ... mesh=my_mesh, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__( + self, field=None, mesh_scoping=None, mesh=None, config=None, server=None + ): + super().__init__( + name="elemental_to_elemental_nodal", config=config, server=server + ) + self._inputs = InputsElementalToElementalNodal(self) + self._outputs = OutputsElementalToElementalNodal(self) + if field is not None: + self.inputs.field.connect(field) + if mesh_scoping is not None: + self.inputs.mesh_scoping.connect(mesh_scoping) + if mesh is not None: + self.inputs.mesh.connect(mesh) + + @staticmethod + def _spec(): + description = """Transform Elemental field to Elemental Nodal field.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=True, + document="""Average only on these entities""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config( + name="elemental_to_elemental_nodal", server=server + ) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsElementalToElementalNodal + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsElementalToElementalNodal + """ + return super().outputs + + +class InputsElementalToElementalNodal(_Inputs): + """Intermediate class used to connect user inputs to + elemental_to_elemental_nodal operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_to_elemental_nodal() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + """ + + def __init__(self, op: Operator): + super().__init__(elemental_to_elemental_nodal._spec().inputs, op) + self._field = Input( + elemental_to_elemental_nodal._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._field) + self._mesh_scoping = Input( + elemental_to_elemental_nodal._spec().input_pin(1), 1, op, -1 + ) + self._inputs.append(self._mesh_scoping) + self._mesh = Input(elemental_to_elemental_nodal._spec().input_pin(7), 7, op, -1) + self._inputs.append(self._mesh) + + @property + def field(self): + """Allows to connect field input to the operator. 
+ + Field or fields container with only one field + is expected + + Parameters + ---------- + my_field : Field or FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_to_elemental_nodal() + >>> op.inputs.field.connect(my_field) + >>> # or + >>> op.inputs.field(my_field) + """ + return self._field + + @property + def mesh_scoping(self): + """Allows to connect mesh_scoping input to the operator. + + Average only on these entities + + Parameters + ---------- + my_mesh_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_to_elemental_nodal() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> # or + >>> op.inputs.mesh_scoping(my_mesh_scoping) + """ + return self._mesh_scoping + + @property + def mesh(self): + """Allows to connect mesh input to the operator. + + Parameters + ---------- + my_mesh : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_to_elemental_nodal() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + +class OutputsElementalToElementalNodal(_Outputs): + """Intermediate class used to get outputs from + elemental_to_elemental_nodal operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_to_elemental_nodal() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() + """ + + def __init__(self, op: Operator): + super().__init__(elemental_to_elemental_nodal._spec().outputs, op) + self._field = Output(elemental_to_elemental_nodal._spec().output_pin(0), 0, op) + self._outputs.append(self._field) + + @property + def field(self): + """Allows to get field output of the operator + + Returns + ---------- + my_field : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_to_elemental_nodal() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() + """ # noqa: E501 + return self._field diff --git a/ansys/dpf/core/operators/averaging/elemental_to_elemental_nodal_fc.py b/ansys/dpf/core/operators/averaging/elemental_to_elemental_nodal_fc.py new file mode 100644 index 00000000000..f12325e9232 --- /dev/null +++ b/ansys/dpf/core/operators/averaging/elemental_to_elemental_nodal_fc.py @@ -0,0 +1,264 @@ +""" +elemental_to_elemental_nodal_fc +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class elemental_to_elemental_nodal_fc(Operator): + """Transform Elemental field to Elemental Nodal field. 
+ + Parameters + ---------- + fields_container : FieldsContainer + mesh : MeshedRegion, optional + mesh_scoping : Scoping, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.averaging.elemental_to_elemental_nodal_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.averaging.elemental_to_elemental_nodal_fc( + ... fields_container=my_fields_container, + ... mesh=my_mesh, + ... mesh_scoping=my_mesh_scoping, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + fields_container=None, + mesh=None, + mesh_scoping=None, + config=None, + server=None, + ): + super().__init__( + name="elemental_to_elemental_nodal_fc", config=config, server=server + ) + self._inputs = InputsElementalToElementalNodalFc(self) + self._outputs = OutputsElementalToElementalNodalFc(self) + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if mesh_scoping is not None: + self.inputs.mesh_scoping.connect(mesh_scoping) + + @staticmethod + def _spec(): + description = """Transform Elemental field to Elemental Nodal field.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + 3: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], 
+ optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config( + name="elemental_to_elemental_nodal_fc", server=server + ) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsElementalToElementalNodalFc + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsElementalToElementalNodalFc + """ + return super().outputs + + +class InputsElementalToElementalNodalFc(_Inputs): + """Intermediate class used to connect user inputs to + elemental_to_elemental_nodal_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_to_elemental_nodal_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + """ + + def __init__(self, op: Operator): + super().__init__(elemental_to_elemental_nodal_fc._spec().inputs, op) + self._fields_container = Input( + elemental_to_elemental_nodal_fc._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._fields_container) + self._mesh = Input( + elemental_to_elemental_nodal_fc._spec().input_pin(1), 1, op, -1 + ) + self._inputs.append(self._mesh) + self._mesh_scoping = Input( + elemental_to_elemental_nodal_fc._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._mesh_scoping) + + @property + def fields_container(self): + """Allows to connect fields_container input to the operator. + + Parameters + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_to_elemental_nodal_fc() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> # or + >>> op.inputs.fields_container(my_fields_container) + """ + return self._fields_container + + @property + def mesh(self): + """Allows to connect mesh input to the operator. + + Parameters + ---------- + my_mesh : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_to_elemental_nodal_fc() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + @property + def mesh_scoping(self): + """Allows to connect mesh_scoping input to the operator. 
+ + Parameters + ---------- + my_mesh_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_to_elemental_nodal_fc() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> # or + >>> op.inputs.mesh_scoping(my_mesh_scoping) + """ + return self._mesh_scoping + + +class OutputsElementalToElementalNodalFc(_Outputs): + """Intermediate class used to get outputs from + elemental_to_elemental_nodal_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_to_elemental_nodal_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(elemental_to_elemental_nodal_fc._spec().outputs, op) + self._fields_container = Output( + elemental_to_elemental_nodal_fc._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_to_elemental_nodal_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/averaging/elemental_to_nodal.py b/ansys/dpf/core/operators/averaging/elemental_to_nodal.py index 50a225049f6..2e790048181 100644 --- a/ansys/dpf/core/operators/averaging/elemental_to_nodal.py +++ b/ansys/dpf/core/operators/averaging/elemental_to_nodal.py @@ -1,72 +1,130 @@ """ elemental_to_nodal -================== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "averaging" category -""" class elemental_to_nodal(Operator): - """Transform ElementalNodal field to Nodal field, compute result on a given node scoping. - - available inputs: - - field (Field, FieldsContainer) - - mesh_scoping (Scoping) (optional) - - force_averaging (int) (optional) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.averaging.elemental_to_nodal() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_force_averaging = int() - >>> op.inputs.force_averaging.connect(my_force_averaging) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.averaging.elemental_to_nodal(field=my_field,mesh_scoping=my_mesh_scoping,force_averaging=my_force_averaging) - - >>> # Get output data - >>> result_field = op.outputs.field()""" - def __init__(self, field=None, mesh_scoping=None, force_averaging=None, config=None, server=None): - super().__init__(name="elemental_to_nodal", config = config, server = server) + """Transform ElementalNodal field to Nodal field, compute result on a + given node scoping. 
+ + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + mesh_scoping : Scoping, optional + force_averaging : int, optional + Averaging on nodes is used if this pin is set + to 1 (default is 1 for integrated + results and 0 for discrete ones) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.averaging.elemental_to_nodal() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_force_averaging = int() + >>> op.inputs.force_averaging.connect(my_force_averaging) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.averaging.elemental_to_nodal( + ... field=my_field, + ... mesh_scoping=my_mesh_scoping, + ... force_averaging=my_force_averaging, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__( + self, + field=None, + mesh_scoping=None, + force_averaging=None, + config=None, + server=None, + ): + super().__init__(name="elemental_to_nodal", config=config, server=server) self._inputs = InputsElementalToNodal(self) self._outputs = OutputsElementalToNodal(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if force_averaging !=None: + if force_averaging is not None: self.inputs.force_averaging.connect(force_averaging) @staticmethod def _spec(): - spec = Specification(description="""Transform ElementalNodal field to Nodal field, compute result on a given node scoping.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : 
PinSpecification(name = "mesh_scoping", type_names=["scoping"], optional=True, document=""""""), - 2 : PinSpecification(name = "force_averaging", type_names=["int32"], optional=True, document="""averaging on nodes is used if this pin is set to 1 (default is 1 for integrated results and 0 for dicrete ones)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Transform ElementalNodal field to Nodal field, compute result on a + given node scoping.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="force_averaging", + type_names=["int32"], + optional=True, + document="""Averaging on nodes is used if this pin is set + to 1 (default is 1 for integrated + results and 0 for discrete ones)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "elemental_to_nodal") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="elemental_to_nodal", server=server) @property def inputs(self): @@ -74,143 +132,140 @@ def inputs(self): Returns -------- - inputs : InputsElementalToNodal + inputs : InputsElementalToNodal """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElementalToNodal + outputs : OutputsElementalToNodal """ return super().outputs -#internal name: elemental_to_nodal -#scripting name: elemental_to_nodal class InputsElementalToNodal(_Inputs): - """Intermediate class used to connect user inputs to elemental_to_nodal operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.elemental_to_nodal() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_force_averaging = int() - >>> op.inputs.force_averaging.connect(my_force_averaging) + """Intermediate class used to connect user inputs to + elemental_to_nodal operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_to_nodal() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_force_averaging = int() + >>> op.inputs.force_averaging.connect(my_force_averaging) """ + def __init__(self, op: Operator): super().__init__(elemental_to_nodal._spec().inputs, op) - self._field = Input(elemental_to_nodal._spec().input_pin(0), 0, op, -1) + self._field = Input(elemental_to_nodal._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._mesh_scoping = Input(elemental_to_nodal._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(elemental_to_nodal._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._force_averaging = Input(elemental_to_nodal._spec().input_pin(2), 2, op, -1) + self._force_averaging = Input( + elemental_to_nodal._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._force_averaging) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_to_nodal() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. 
Parameters ---------- - my_mesh_scoping : Scoping, + my_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_to_nodal() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def force_averaging(self): - """Allows to connect force_averaging input to the operator + """Allows to connect force_averaging input to the operator. - - pindoc: averaging on nodes is used if this pin is set to 1 (default is 1 for integrated results and 0 for dicrete ones) + Averaging on nodes is used if this pin is set + to 1 (default is 1 for integrated + results and 0 for discrete ones) Parameters ---------- - my_force_averaging : int, + my_force_averaging : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_to_nodal() >>> op.inputs.force_averaging.connect(my_force_averaging) - >>> #or + >>> # or >>> op.inputs.force_averaging(my_force_averaging) - """ return self._force_averaging + class OutputsElementalToNodal(_Outputs): - """Intermediate class used to get outputs from elemental_to_nodal operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.elemental_to_nodal() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + elemental_to_nodal operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_to_nodal() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(elemental_to_nodal._spec().outputs, op) - self._field = Output(elemental_to_nodal._spec().output_pin(0), 0, op) + self._field = Output(elemental_to_nodal._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_to_nodal() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/averaging/elemental_to_nodal_fc.py b/ansys/dpf/core/operators/averaging/elemental_to_nodal_fc.py index 97dd91c8364..c32b9ba9919 100644 --- a/ansys/dpf/core/operators/averaging/elemental_to_nodal_fc.py +++ b/ansys/dpf/core/operators/averaging/elemental_to_nodal_fc.py @@ -1,78 +1,140 @@ """ elemental_to_nodal_fc -===================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "averaging" category -""" class elemental_to_nodal_fc(Operator): - """Transform ElementalNodal fields to Nodal fields, compute result on a given node scoping. 
- - available inputs: - - fields_container (FieldsContainer) - - mesh (MeshedRegion, MeshesContainer) (optional) - - force_averaging (int) (optional) - - mesh_scoping (Scoping, ScopingsContainer) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.averaging.elemental_to_nodal_fc() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_force_averaging = int() - >>> op.inputs.force_averaging.connect(my_force_averaging) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.averaging.elemental_to_nodal_fc(fields_container=my_fields_container,mesh=my_mesh,force_averaging=my_force_averaging,mesh_scoping=my_mesh_scoping) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, mesh=None, force_averaging=None, mesh_scoping=None, config=None, server=None): - super().__init__(name="elemental_to_nodal_fc", config = config, server = server) + """Transform ElementalNodal fields to Nodal fields, compute result on a + given node scoping. 
+ + Parameters + ---------- + fields_container : FieldsContainer + mesh : MeshedRegion or MeshesContainer, optional + force_averaging : int, optional + Averaging on nodes is used if this pin is set + to 1 (default is one for integrated + results and 0 for discrete ones) + mesh_scoping : Scoping or ScopingsContainer, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.averaging.elemental_to_nodal_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_force_averaging = int() + >>> op.inputs.force_averaging.connect(my_force_averaging) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.averaging.elemental_to_nodal_fc( + ... fields_container=my_fields_container, + ... mesh=my_mesh, + ... force_averaging=my_force_averaging, + ... mesh_scoping=my_mesh_scoping, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + fields_container=None, + mesh=None, + force_averaging=None, + mesh_scoping=None, + config=None, + server=None, + ): + super().__init__(name="elemental_to_nodal_fc", config=config, server=server) self._inputs = InputsElementalToNodalFc(self) self._outputs = OutputsElementalToNodalFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) - if force_averaging !=None: + if force_averaging is not None: self.inputs.force_averaging.connect(force_averaging) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) @staticmethod def _spec(): - spec = Specification(description="""Transform ElementalNodal fields to Nodal fields, compute result on a given node scoping.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document=""""""), - 2 : PinSpecification(name = "force_averaging", type_names=["int32"], optional=True, document="""averaging on nodes is used if this pin is set to 1 (default is one for integrated results and 0 for dicrete ones)"""), - 3 : PinSpecification(name = "mesh_scoping", type_names=["scoping","scopings_container"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Transform ElementalNodal fields to Nodal fields, compute result on a + given node scoping.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + 
type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="force_averaging", + type_names=["int32"], + optional=True, + document="""Averaging on nodes is used if this pin is set + to 1 (default is one for integrated + results and 0 for discrete ones)""", + ), + 3: PinSpecification( + name="mesh_scoping", + type_names=["scoping", "scopings_container"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "elemental_to_nodal_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="elemental_to_nodal_fc", server=server) @property def inputs(self): @@ -80,165 +142,165 @@ def inputs(self): Returns -------- - inputs : InputsElementalToNodalFc + inputs : InputsElementalToNodalFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElementalToNodalFc + outputs : OutputsElementalToNodalFc """ return super().outputs -#internal name: elemental_to_nodal_fc -#scripting name: elemental_to_nodal_fc class InputsElementalToNodalFc(_Inputs): - """Intermediate class used to connect user inputs to elemental_to_nodal_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.elemental_to_nodal_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_force_averaging = int() - >>> op.inputs.force_averaging.connect(my_force_averaging) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + """Intermediate class used to connect user inputs to + elemental_to_nodal_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_to_nodal_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_force_averaging = int() + >>> op.inputs.force_averaging.connect(my_force_averaging) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) """ + def __init__(self, op: Operator): super().__init__(elemental_to_nodal_fc._spec().inputs, op) - self._fields_container = Input(elemental_to_nodal_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + elemental_to_nodal_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._mesh = Input(elemental_to_nodal_fc._spec().input_pin(1), 1, op, -1) + self._mesh = Input(elemental_to_nodal_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh) - self._force_averaging = Input(elemental_to_nodal_fc._spec().input_pin(2), 2, op, -1) + self._force_averaging = Input( + elemental_to_nodal_fc._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._force_averaging) - self._mesh_scoping = Input(elemental_to_nodal_fc._spec().input_pin(3), 3, op, -1) + self._mesh_scoping = Input( + elemental_to_nodal_fc._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._mesh_scoping) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_to_nodal_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_to_nodal_fc() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def force_averaging(self): - """Allows to connect force_averaging input to the operator + """Allows to connect force_averaging input to the operator. - - pindoc: averaging on nodes is used if this pin is set to 1 (default is one for integrated results and 0 for dicrete ones) + Averaging on nodes is used if this pin is set + to 1 (default is one for integrated + results and 0 for discrete ones) Parameters ---------- - my_force_averaging : int, + my_force_averaging : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_to_nodal_fc() >>> op.inputs.force_averaging.connect(my_force_averaging) - >>> #or + >>> # or >>> op.inputs.force_averaging(my_force_averaging) - """ return self._force_averaging @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. 
Parameters ---------- - my_mesh_scoping : Scoping, ScopingsContainer, + my_mesh_scoping : Scoping or ScopingsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_to_nodal_fc() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping + class OutputsElementalToNodalFc(_Outputs): - """Intermediate class used to get outputs from elemental_to_nodal_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.elemental_to_nodal_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + elemental_to_nodal_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_to_nodal_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(elemental_to_nodal_fc._spec().outputs, op) - self._fields_container = Output(elemental_to_nodal_fc._spec().output_pin(0), 0, op) + self._fields_container = Output( + elemental_to_nodal_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.elemental_to_nodal_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/averaging/extend_to_mid_nodes.py b/ansys/dpf/core/operators/averaging/extend_to_mid_nodes.py index ee12154e5e5..a9204360cc1 100644 --- a/ansys/dpf/core/operators/averaging/extend_to_mid_nodes.py +++ b/ansys/dpf/core/operators/averaging/extend_to_mid_nodes.py @@ -1,66 +1,106 @@ """ extend_to_mid_nodes -=================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "averaging" category -""" class extend_to_mid_nodes(Operator): - """Extends ElementalNodal field defined on corner nodes to a ElementalNodal field defined also on the mid nodes. - - available inputs: - - field (Field, FieldsContainer) - - mesh (MeshedRegion) (optional) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.averaging.extend_to_mid_nodes() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.averaging.extend_to_mid_nodes(field=my_field,mesh=my_mesh) + """Extends ElementalNodal field defined on corner nodes to a + ElementalNodal field defined also on the mid nodes. 
+ + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + mesh : MeshedRegion, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.averaging.extend_to_mid_nodes() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.averaging.extend_to_mid_nodes( + ... field=my_field, + ... mesh=my_mesh, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, field=None, mesh=None, config=None, server=None): - super().__init__(name="extend_to_mid_nodes", config = config, server = server) + super().__init__(name="extend_to_mid_nodes", config=config, server=server) self._inputs = InputsExtendToMidNodes(self) self._outputs = OutputsExtendToMidNodes(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Extends ElementalNodal field defined on corner nodes to a ElementalNodal field defined also on the mid nodes.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Extends ElementalNodal field defined on corner nodes to a + ElementalNodal field defined also on the mid nodes.""" + 
spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "extend_to_mid_nodes") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="extend_to_mid_nodes", server=server) @property def inputs(self): @@ -68,117 +108,112 @@ def inputs(self): Returns -------- - inputs : InputsExtendToMidNodes + inputs : InputsExtendToMidNodes """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsExtendToMidNodes + outputs : OutputsExtendToMidNodes """ return super().outputs -#internal name: extend_to_mid_nodes -#scripting name: extend_to_mid_nodes class InputsExtendToMidNodes(_Inputs): - """Intermediate class used to connect user inputs to extend_to_mid_nodes operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.extend_to_mid_nodes() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + extend_to_mid_nodes operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.extend_to_mid_nodes() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(extend_to_mid_nodes._spec().inputs, op) - self._field = Input(extend_to_mid_nodes._spec().input_pin(0), 0, op, -1) + self._field = Input(extend_to_mid_nodes._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._mesh = Input(extend_to_mid_nodes._spec().input_pin(7), 7, op, -1) + self._mesh = Input(extend_to_mid_nodes._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.extend_to_mid_nodes() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.extend_to_mid_nodes() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsExtendToMidNodes(_Outputs): - """Intermediate class used to get outputs from extend_to_mid_nodes operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.extend_to_mid_nodes() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + extend_to_mid_nodes operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.extend_to_mid_nodes() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(extend_to_mid_nodes._spec().outputs, op) - self._field = Output(extend_to_mid_nodes._spec().output_pin(0), 0, op) + self._field = Output(extend_to_mid_nodes._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.extend_to_mid_nodes() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/averaging/extend_to_mid_nodes_fc.py b/ansys/dpf/core/operators/averaging/extend_to_mid_nodes_fc.py index ddfd6ff9749..eae8b8ce3a4 100644 --- a/ansys/dpf/core/operators/averaging/extend_to_mid_nodes_fc.py +++ b/ansys/dpf/core/operators/averaging/extend_to_mid_nodes_fc.py @@ -1,66 +1,108 @@ """ extend_to_mid_nodes_fc -====================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "averaging" category -""" class extend_to_mid_nodes_fc(Operator): - """Extends ElementalNodal fields defined on corner nodes to ElementalNodal fields defined also on the mid nodes. 
- - available inputs: - - fields_container (FieldsContainer) - - mesh (MeshedRegion) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.averaging.extend_to_mid_nodes_fc() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.averaging.extend_to_mid_nodes_fc(fields_container=my_fields_container,mesh=my_mesh) + """Extends ElementalNodal fields defined on corner nodes to + ElementalNodal fields defined also on the mid nodes. + + Parameters + ---------- + fields_container : FieldsContainer + mesh : MeshedRegion, optional + The mesh region in this pin is used to + perform the averaging, if there is no + field's support it is used + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.averaging.extend_to_mid_nodes_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.averaging.extend_to_mid_nodes_fc( + ... fields_container=my_fields_container, + ... mesh=my_mesh, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, mesh=None, config=None, server=None): - super().__init__(name="extend_to_mid_nodes_fc", config = config, server = server) + super().__init__(name="extend_to_mid_nodes_fc", config=config, server=server) self._inputs = InputsExtendToMidNodesFc(self) self._outputs = OutputsExtendToMidNodesFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Extends ElementalNodal fields defined on corner nodes to ElementalNodal fields defined also on the mid nodes.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""the mesh region in this pin is used to perform the averaging, if there is no field's support it is used""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Extends ElementalNodal fields defined on corner nodes to + ElementalNodal fields defined also on the mid nodes.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""The mesh region in this pin is used to + perform the averaging, if there is no + field's support it is used""", + ), + }, + map_output_pin_spec={ + 0: 
PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "extend_to_mid_nodes_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="extend_to_mid_nodes_fc", server=server) @property def inputs(self): @@ -68,117 +110,117 @@ def inputs(self): Returns -------- - inputs : InputsExtendToMidNodesFc + inputs : InputsExtendToMidNodesFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsExtendToMidNodesFc + outputs : OutputsExtendToMidNodesFc """ return super().outputs -#internal name: extend_to_mid_nodes_fc -#scripting name: extend_to_mid_nodes_fc class InputsExtendToMidNodesFc(_Inputs): - """Intermediate class used to connect user inputs to extend_to_mid_nodes_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.extend_to_mid_nodes_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + extend_to_mid_nodes_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.extend_to_mid_nodes_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(extend_to_mid_nodes_fc._spec().inputs, op) - self._fields_container = Input(extend_to_mid_nodes_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + extend_to_mid_nodes_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._mesh = Input(extend_to_mid_nodes_fc._spec().input_pin(1), 1, op, -1) + self._mesh = Input(extend_to_mid_nodes_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.extend_to_mid_nodes_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
- - pindoc: the mesh region in this pin is used to perform the averaging, if there is no field's support it is used + The mesh region in this pin is used to + perform the averaging, if there is no + field's support it is used Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.extend_to_mid_nodes_fc() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsExtendToMidNodesFc(_Outputs): - """Intermediate class used to get outputs from extend_to_mid_nodes_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.extend_to_mid_nodes_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + extend_to_mid_nodes_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.extend_to_mid_nodes_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(extend_to_mid_nodes_fc._spec().outputs, op) - self._fields_container = Output(extend_to_mid_nodes_fc._spec().output_pin(0), 0, op) + self._fields_container = Output( + extend_to_mid_nodes_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.extend_to_mid_nodes_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/averaging/gauss_to_node_fc.py b/ansys/dpf/core/operators/averaging/gauss_to_node_fc.py index 12cf7644592..01cac4b97a5 100644 --- a/ansys/dpf/core/operators/averaging/gauss_to_node_fc.py +++ b/ansys/dpf/core/operators/averaging/gauss_to_node_fc.py @@ -1,72 +1,136 @@ """ gauss_to_node_fc -================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "averaging" category -""" class gauss_to_node_fc(Operator): - """Extrapolating results available at Gauss or quadrature points to nodal points for a field container. 
The available elements are : Linear quadrangle , parabolique quadrangle,Linear Hexagonal, quadratic hexagonal , linear tetrahedral, quadratic tetrahedral - - available inputs: - - fields_container (FieldsContainer) - - mesh (MeshedRegion, MeshesContainer) (optional) - - scoping (Scoping) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.averaging.gauss_to_node_fc() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.averaging.gauss_to_node_fc(fields_container=my_fields_container,mesh=my_mesh,scoping=my_scoping) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, mesh=None, scoping=None, config=None, server=None): - super().__init__(name="gauss_to_node_fc", config = config, server = server) + """Extrapolating results available at Gauss or quadrature points to nodal + points for a field container. The available elements are : Linear + quadrangle , parabolique quadrangle,Linear Hexagonal, quadratic + hexagonal , linear tetrahedral, quadratic tetrahedral + + Parameters + ---------- + fields_container : FieldsContainer + mesh : MeshedRegion or MeshesContainer, optional + The mesh region in this pin is used for + extrapolating results available at + gauss or quadrature points to nodal + points. 
+ scoping : Scoping, optional + Extrapolating results on the scoping selected + by the user, if it is scoping + container, the label must correspond + to the one of the fields container + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.averaging.gauss_to_node_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.averaging.gauss_to_node_fc( + ... fields_container=my_fields_container, + ... mesh=my_mesh, + ... scoping=my_scoping, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, fields_container=None, mesh=None, scoping=None, config=None, server=None + ): + super().__init__(name="gauss_to_node_fc", config=config, server=server) self._inputs = InputsGaussToNodeFc(self) self._outputs = OutputsGaussToNodeFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) - if scoping !=None: + if scoping is not None: self.inputs.scoping.connect(scoping) @staticmethod def _spec(): - spec = Specification(description="""Extrapolating results available at Gauss or quadrature points to nodal points for a field container. 
The available elements are : Linear quadrangle , parabolique quadrangle,Linear Hexagonal, quadratic hexagonal , linear tetrahedral, quadratic tetrahedral """, - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""the mesh region in this pin is used for Extrapolating results available at Gauss or quadrature points to nodal points."""), - 3 : PinSpecification(name = "scoping", type_names=["scoping"], optional=True, document="""Extrapolating results on the scoping selected by the user, if it is scoping container, the label must correspond to the one of the fields container""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Extrapolating results available at Gauss or quadrature points to nodal + points for a field container. 
The available elements are : + Linear quadrangle , parabolique quadrangle,Linear + Hexagonal, quadratic hexagonal , linear tetrahedral, + quadratic tetrahedral""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""The mesh region in this pin is used for + extrapolating results available at + gauss or quadrature points to nodal + points.""", + ), + 3: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""Extrapolating results on the scoping selected + by the user, if it is scoping + container, the label must correspond + to the one of the fields container""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "gauss_to_node_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="gauss_to_node_fc", server=server) @property def inputs(self): @@ -74,143 +138,141 @@ def inputs(self): Returns -------- - inputs : InputsGaussToNodeFc + inputs : InputsGaussToNodeFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsGaussToNodeFc + outputs : OutputsGaussToNodeFc """ return super().outputs -#internal name: gauss_to_node_fc -#scripting name: gauss_to_node_fc class InputsGaussToNodeFc(_Inputs): - """Intermediate class used to connect user inputs to gauss_to_node_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.gauss_to_node_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) + """Intermediate class used to connect user inputs to + gauss_to_node_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.gauss_to_node_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) """ + def __init__(self, op: Operator): super().__init__(gauss_to_node_fc._spec().inputs, op) - self._fields_container = Input(gauss_to_node_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(gauss_to_node_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) - self._mesh = Input(gauss_to_node_fc._spec().input_pin(1), 1, op, -1) + self._mesh = Input(gauss_to_node_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh) - self._scoping = Input(gauss_to_node_fc._spec().input_pin(3), 3, op, -1) + self._scoping = Input(gauss_to_node_fc._spec().input_pin(3), 3, op, -1) self._inputs.append(self._scoping) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.gauss_to_node_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: the mesh region in this pin is used for Extrapolating results available at Gauss or quadrature points to nodal points. + The mesh region in this pin is used for + extrapolating results available at + gauss or quadrature points to nodal + points. 
Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.gauss_to_node_fc() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def scoping(self): - """Allows to connect scoping input to the operator + """Allows to connect scoping input to the operator. - - pindoc: Extrapolating results on the scoping selected by the user, if it is scoping container, the label must correspond to the one of the fields container + Extrapolating results on the scoping selected + by the user, if it is scoping + container, the label must correspond + to the one of the fields container Parameters ---------- - my_scoping : Scoping, + my_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.gauss_to_node_fc() >>> op.inputs.scoping.connect(my_scoping) - >>> #or + >>> # or >>> op.inputs.scoping(my_scoping) - """ return self._scoping + class OutputsGaussToNodeFc(_Outputs): - """Intermediate class used to get outputs from gauss_to_node_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.gauss_to_node_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + gauss_to_node_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.gauss_to_node_fc() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(gauss_to_node_fc._spec().outputs, op) - self._fields_container = Output(gauss_to_node_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(gauss_to_node_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.gauss_to_node_fc() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/averaging/nodal_difference.py b/ansys/dpf/core/operators/averaging/nodal_difference.py index 0056b6321ae..87e621f06f4 100644 --- a/ansys/dpf/core/operators/averaging/nodal_difference.py +++ b/ansys/dpf/core/operators/averaging/nodal_difference.py @@ -1,72 +1,125 @@ """ nodal_difference -================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "averaging" category -""" class nodal_difference(Operator): - """Transform ElementalNodal field into Nodal field. Each nodal value is the maximum difference between the unaveraged computed result for all elements that share this particular node. Result is computed on a given node scoping. 
- - available inputs: - - field (Field, FieldsContainer) - - mesh_scoping (Scoping) (optional) - - mesh (MeshedRegion) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.averaging.nodal_difference() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.averaging.nodal_difference(field=my_field,mesh_scoping=my_mesh_scoping,mesh=my_mesh) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, field=None, mesh_scoping=None, mesh=None, config=None, server=None): - super().__init__(name="nodal_difference", config = config, server = server) + """Transform ElementalNodal field into Nodal field. Each nodal value is + the maximum difference between the unaveraged computed result for + all elements that share this particular node. Result is computed + on a given node scoping. 
+ + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + mesh_scoping : Scoping, optional + Average only on these entities + mesh : MeshedRegion, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.averaging.nodal_difference() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.averaging.nodal_difference( + ... field=my_field, + ... mesh_scoping=my_mesh_scoping, + ... mesh=my_mesh, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__( + self, field=None, mesh_scoping=None, mesh=None, config=None, server=None + ): + super().__init__(name="nodal_difference", config=config, server=server) self._inputs = InputsNodalDifference(self) self._outputs = OutputsNodalDifference(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Transform ElementalNodal field into Nodal field. Each nodal value is the maximum difference between the unaveraged computed result for all elements that share this particular node. 
Result is computed on a given node scoping.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scoping"], optional=True, document="""average only on these entities"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Transform ElementalNodal field into Nodal field. Each nodal value is + the maximum difference between the unaveraged computed + result for all elements that share this particular node. + Result is computed on a given node scoping.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=True, + document="""Average only on these entities""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "nodal_difference") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="nodal_difference", server=server) @property def inputs(self): @@ -74,143 +127,136 @@ def inputs(self): Returns -------- - inputs : InputsNodalDifference + inputs : InputsNodalDifference """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNodalDifference + outputs : OutputsNodalDifference """ return super().outputs -#internal name: nodal_difference -#scripting name: nodal_difference class InputsNodalDifference(_Inputs): - """Intermediate class used to connect user inputs to nodal_difference operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.nodal_difference() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + nodal_difference operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.nodal_difference() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(nodal_difference._spec().inputs, op) - self._field = Input(nodal_difference._spec().input_pin(0), 0, op, -1) + self._field = Input(nodal_difference._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._mesh_scoping = Input(nodal_difference._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(nodal_difference._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._mesh = Input(nodal_difference._spec().input_pin(7), 7, op, -1) + self._mesh = Input(nodal_difference._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.nodal_difference() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. 
- - pindoc: average only on these entities + Average only on these entities Parameters ---------- - my_mesh_scoping : Scoping, + my_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.nodal_difference() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.nodal_difference() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsNodalDifference(_Outputs): - """Intermediate class used to get outputs from nodal_difference operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.nodal_difference() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + nodal_difference operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.nodal_difference() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(nodal_difference._spec().outputs, op) - self._fields_container = Output(nodal_difference._spec().output_pin(0), 0, op) - self._outputs.append(self._fields_container) + self._field = Output(nodal_difference._spec().output_pin(0), 0, op) + self._outputs.append(self._field) @property - def fields_container(self): - """Allows to get fields_container output of the operator - + def field(self): + """Allows to get field output of the operator Returns ---------- - my_fields_container : FieldsContainer, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.nodal_difference() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ - return self._fields_container - + >>> result_field = op.outputs.field() + """ # noqa: E501 + return self._field diff --git a/ansys/dpf/core/operators/averaging/nodal_difference_fc.py b/ansys/dpf/core/operators/averaging/nodal_difference_fc.py index 32eb368805a..dc1ff05fa50 100644 --- a/ansys/dpf/core/operators/averaging/nodal_difference_fc.py +++ b/ansys/dpf/core/operators/averaging/nodal_difference_fc.py @@ -1,72 +1,136 @@ """ nodal_difference_fc -=================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "averaging" category -""" class nodal_difference_fc(Operator): - """Transform ElementalNodal fields into Nodal fields. 
Each nodal value is the maximum difference between the unaveraged computed result for all elements that share this particular node. Result is computed on a given node scoping. If the input fields are mixed shell/solid, then the fields are splitted by element shape and the output fields container has elshape label. - - available inputs: - - fields_container (FieldsContainer) - - mesh (MeshedRegion, MeshesContainer) (optional) - - scoping (Scoping, ScopingsContainer) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.averaging.nodal_difference_fc() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.averaging.nodal_difference_fc(fields_container=my_fields_container,mesh=my_mesh,scoping=my_scoping) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, mesh=None, scoping=None, config=None, server=None): - super().__init__(name="nodal_difference_fc", config = config, server = server) + """Transform ElementalNodal fields into Nodal fields. Each nodal value is + the maximum difference between the unaveraged computed result for + all elements that share this particular node. Result is computed + on a given node scoping. If the input fields are mixed + shell/solid, then the fields are split by element shape and the + output fields container has elshape label. 
+ + Parameters + ---------- + fields_container : FieldsContainer + mesh : MeshedRegion or MeshesContainer, optional + The mesh region in this pin is used to + perform the averaging, if there is no + field's support it is used + scoping : Scoping or ScopingsContainer, optional + Average only on these nodes, if it is scoping + container, the label must correspond + to the one of the fields container + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.averaging.nodal_difference_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.averaging.nodal_difference_fc( + ... fields_container=my_fields_container, + ... mesh=my_mesh, + ... scoping=my_scoping, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, fields_container=None, mesh=None, scoping=None, config=None, server=None + ): + super().__init__(name="nodal_difference_fc", config=config, server=server) self._inputs = InputsNodalDifferenceFc(self) self._outputs = OutputsNodalDifferenceFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) - if scoping !=None: + if scoping is not None: self.inputs.scoping.connect(scoping) @staticmethod def _spec(): - spec = Specification(description="""Transform ElementalNodal fields into Nodal fields. Each nodal value is the maximum difference between the unaveraged computed result for all elements that share this particular node. 
Result is computed on a given node scoping. If the input fields are mixed shell/solid, then the fields are splitted by element shape and the output fields container has elshape label.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""the mesh region in this pin is used to perform the averaging, if there is no field's support it is used"""), - 3 : PinSpecification(name = "scoping", type_names=["scoping","scopings_container"], optional=True, document="""average only on these nodes, if it is scoping container, the label must correspond to the one of the fields container""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Transform ElementalNodal fields into Nodal fields. Each nodal value is + the maximum difference between the unaveraged computed + result for all elements that share this particular node. + Result is computed on a given node scoping. 
If the input + fields are mixed shell/solid, then the fields are split by + element shape and the output fields container has elshape + label.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""The mesh region in this pin is used to + perform the averaging, if there is no + field's support it is used""", + ), + 3: PinSpecification( + name="scoping", + type_names=["scoping", "scopings_container"], + optional=True, + document="""Average only on these nodes, if it is scoping + container, the label must correspond + to the one of the fields container""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "nodal_difference_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="nodal_difference_fc", server=server) @property def inputs(self): @@ -74,143 +138,143 @@ def inputs(self): Returns -------- - inputs : InputsNodalDifferenceFc + inputs : InputsNodalDifferenceFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNodalDifferenceFc + outputs : OutputsNodalDifferenceFc """ return super().outputs -#internal name: nodal_difference_fc -#scripting name: nodal_difference_fc class InputsNodalDifferenceFc(_Inputs): - """Intermediate class used to connect user inputs to nodal_difference_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.nodal_difference_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) + """Intermediate class used to connect user inputs to + nodal_difference_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.nodal_difference_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) """ + def __init__(self, op: Operator): super().__init__(nodal_difference_fc._spec().inputs, op) - self._fields_container = Input(nodal_difference_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + nodal_difference_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._mesh = Input(nodal_difference_fc._spec().input_pin(1), 1, op, -1) + self._mesh = Input(nodal_difference_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh) - self._scoping = Input(nodal_difference_fc._spec().input_pin(3), 3, op, -1) + self._scoping = Input(nodal_difference_fc._spec().input_pin(3), 3, op, -1) self._inputs.append(self._scoping) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.nodal_difference_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
- - pindoc: the mesh region in this pin is used to perform the averaging, if there is no field's support it is used + The mesh region in this pin is used to + perform the averaging, if there is no + field's support it is used Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.nodal_difference_fc() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def scoping(self): - """Allows to connect scoping input to the operator + """Allows to connect scoping input to the operator. - - pindoc: average only on these nodes, if it is scoping container, the label must correspond to the one of the fields container + Average only on these nodes, if it is scoping + container, the label must correspond + to the one of the fields container Parameters ---------- - my_scoping : Scoping, ScopingsContainer, + my_scoping : Scoping or ScopingsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.nodal_difference_fc() >>> op.inputs.scoping.connect(my_scoping) - >>> #or + >>> # or >>> op.inputs.scoping(my_scoping) - """ return self._scoping + class OutputsNodalDifferenceFc(_Outputs): - """Intermediate class used to get outputs from nodal_difference_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.nodal_difference_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + nodal_difference_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.nodal_difference_fc() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(nodal_difference_fc._spec().outputs, op) - self._fields_container = Output(nodal_difference_fc._spec().output_pin(0), 0, op) + self._fields_container = Output( + nodal_difference_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.nodal_difference_fc() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/averaging/nodal_fraction_fc.py b/ansys/dpf/core/operators/averaging/nodal_fraction_fc.py index 3d41db09f3a..c4b60c085e0 100644 --- a/ansys/dpf/core/operators/averaging/nodal_fraction_fc.py +++ b/ansys/dpf/core/operators/averaging/nodal_fraction_fc.py @@ -1,78 +1,154 @@ """ nodal_fraction_fc -================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "averaging" category -""" class nodal_fraction_fc(Operator): - """Transform ElementalNodal fields into Nodal fields. Each nodal value is the fraction between the nodal difference and the nodal average. Result is computed on a given node scoping. 
- - available inputs: - - fields_container (FieldsContainer) - - mesh (MeshedRegion) (optional) - - scoping (Scoping) (optional) - - denominator (FieldsContainer) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.averaging.nodal_fraction_fc() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - >>> my_denominator = dpf.FieldsContainer() - >>> op.inputs.denominator.connect(my_denominator) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.averaging.nodal_fraction_fc(fields_container=my_fields_container,mesh=my_mesh,scoping=my_scoping,denominator=my_denominator) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, mesh=None, scoping=None, denominator=None, config=None, server=None): - super().__init__(name="nodal_fraction_fc", config = config, server = server) + """Transform ElementalNodal fields into Nodal fields. Each nodal value is + the fraction between the nodal difference and the nodal average. + Result is computed on a given node scoping. 
+ + Parameters + ---------- + fields_container : FieldsContainer + mesh : MeshedRegion, optional + The mesh region in this pin is used to + perform the averaging, if there is no + field's support it is used + scoping : Scoping, optional + Average only on these nodes, if it is scoping + container, the label must correspond + to the one of the fields container + denominator : FieldsContainer, optional + If a fields container is set in this pin, it + is used as the denominator of the + fraction instead of + elemental_nodal_to_nodal_fc + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.averaging.nodal_fraction_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_denominator = dpf.FieldsContainer() + >>> op.inputs.denominator.connect(my_denominator) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.averaging.nodal_fraction_fc( + ... fields_container=my_fields_container, + ... mesh=my_mesh, + ... scoping=my_scoping, + ... denominator=my_denominator, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + fields_container=None, + mesh=None, + scoping=None, + denominator=None, + config=None, + server=None, + ): + super().__init__(name="nodal_fraction_fc", config=config, server=server) self._inputs = InputsNodalFractionFc(self) self._outputs = OutputsNodalFractionFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) - if scoping !=None: + if scoping is not None: self.inputs.scoping.connect(scoping) - if denominator !=None: + if denominator is not None: self.inputs.denominator.connect(denominator) @staticmethod def _spec(): - spec = Specification(description="""Transform ElementalNodal fields into Nodal fields. Each nodal value is the fraction between the nodal difference and the nodal average. Result is computed on a given node scoping.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""the mesh region in this pin is used to perform the averaging, if there is no field's support it is used"""), - 3 : PinSpecification(name = "scoping", type_names=["scoping"], optional=True, document="""average only on these nodes, if it is scoping container, the label must correspond to the one of the fields container"""), - 6 : PinSpecification(name = "denominator", type_names=["fields_container"], optional=True, document="""if a fields container is set in this pin, it is used as the denominator of the fraction instead of elemental_nodal_To_nodal_fc""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Transform 
ElementalNodal fields into Nodal fields. Each nodal value is + the fraction between the nodal difference and the nodal + average. Result is computed on a given node scoping.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""The mesh region in this pin is used to + perform the averaging, if there is no + field's support it is used""", + ), + 3: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""Average only on these nodes, if it is scoping + container, the label must correspond + to the one of the fields container""", + ), + 6: PinSpecification( + name="denominator", + type_names=["fields_container"], + optional=True, + document="""If a fields container is set in this pin, it + is used as the denominator of the + fraction instead of + elemental_nodal_to_nodal_fc""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "nodal_fraction_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="nodal_fraction_fc", server=server) @property def inputs(self): @@ -80,169 +156,168 @@ def inputs(self): Returns -------- - inputs : InputsNodalFractionFc + inputs : InputsNodalFractionFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNodalFractionFc + outputs : OutputsNodalFractionFc """ return super().outputs -#internal name: nodal_fraction_fc -#scripting name: nodal_fraction_fc class InputsNodalFractionFc(_Inputs): - """Intermediate class used to connect user inputs to nodal_fraction_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.nodal_fraction_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - >>> my_denominator = dpf.FieldsContainer() - >>> op.inputs.denominator.connect(my_denominator) + """Intermediate class used to connect user inputs to + nodal_fraction_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.nodal_fraction_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_denominator = dpf.FieldsContainer() + >>> op.inputs.denominator.connect(my_denominator) """ + def __init__(self, op: Operator): super().__init__(nodal_fraction_fc._spec().inputs, op) - self._fields_container = Input(nodal_fraction_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + nodal_fraction_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._mesh = Input(nodal_fraction_fc._spec().input_pin(1), 1, op, -1) + self._mesh = Input(nodal_fraction_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh) - self._scoping = Input(nodal_fraction_fc._spec().input_pin(3), 3, op, -1) + self._scoping = Input(nodal_fraction_fc._spec().input_pin(3), 3, op, -1) self._inputs.append(self._scoping) - self._denominator = Input(nodal_fraction_fc._spec().input_pin(6), 6, op, -1) + self._denominator = Input(nodal_fraction_fc._spec().input_pin(6), 6, op, -1) self._inputs.append(self._denominator) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.nodal_fraction_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: the mesh region in this pin is used to perform the averaging, if there is no field's support it is used + The mesh region in this pin is used to + perform the averaging, if there is no + field's support it is used Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.nodal_fraction_fc() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def scoping(self): - """Allows to connect scoping input to the operator + """Allows to connect scoping input to the operator. - - pindoc: average only on these nodes, if it is scoping container, the label must correspond to the one of the fields container + Average only on these nodes, if it is scoping + container, the label must correspond + to the one of the fields container Parameters ---------- - my_scoping : Scoping, + my_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.nodal_fraction_fc() >>> op.inputs.scoping.connect(my_scoping) - >>> #or + >>> # or >>> op.inputs.scoping(my_scoping) - """ return self._scoping @property def denominator(self): - """Allows to connect denominator input to the operator + """Allows to connect denominator input to the operator. 
- - pindoc: if a fields container is set in this pin, it is used as the denominator of the fraction instead of elemental_nodal_To_nodal_fc + If a fields container is set in this pin, it + is used as the denominator of the + fraction instead of + elemental_nodal_to_nodal_fc Parameters ---------- - my_denominator : FieldsContainer, + my_denominator : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.nodal_fraction_fc() >>> op.inputs.denominator.connect(my_denominator) - >>> #or + >>> # or >>> op.inputs.denominator(my_denominator) - """ return self._denominator + class OutputsNodalFractionFc(_Outputs): - """Intermediate class used to get outputs from nodal_fraction_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.nodal_fraction_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + nodal_fraction_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.nodal_fraction_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(nodal_fraction_fc._spec().outputs, op) - self._fields_container = Output(nodal_fraction_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(nodal_fraction_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.nodal_fraction_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/averaging/nodal_to_elemental.py b/ansys/dpf/core/operators/averaging/nodal_to_elemental.py index 0cb4e53f6f1..4da7fea6e89 100644 --- a/ansys/dpf/core/operators/averaging/nodal_to_elemental.py +++ b/ansys/dpf/core/operators/averaging/nodal_to_elemental.py @@ -1,72 +1,128 @@ """ nodal_to_elemental -================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "averaging" category -""" class nodal_to_elemental(Operator): - """Transform Nodal field to Elemental field, compute result on a given element scoping. 
- - available inputs: - - field (Field, FieldsContainer) - - mesh_scoping (Scoping) (optional) - - collapse_shell_layers (bool) (optional) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.averaging.nodal_to_elemental() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_collapse_shell_layers = bool() - >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.averaging.nodal_to_elemental(field=my_field,mesh_scoping=my_mesh_scoping,collapse_shell_layers=my_collapse_shell_layers) - - >>> # Get output data - >>> result_field = op.outputs.field()""" - def __init__(self, field=None, mesh_scoping=None, collapse_shell_layers=None, config=None, server=None): - super().__init__(name="nodal_to_elemental", config = config, server = server) + """Transform Nodal field to Elemental field, compute result on a given + element scoping. 
+ + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + mesh_scoping : Scoping, optional + collapse_shell_layers : bool, optional + If true shell layers are averaged as well + (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.averaging.nodal_to_elemental() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_collapse_shell_layers = bool() + >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.averaging.nodal_to_elemental( + ... field=my_field, + ... mesh_scoping=my_mesh_scoping, + ... collapse_shell_layers=my_collapse_shell_layers, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__( + self, + field=None, + mesh_scoping=None, + collapse_shell_layers=None, + config=None, + server=None, + ): + super().__init__(name="nodal_to_elemental", config=config, server=server) self._inputs = InputsNodalToElemental(self) self._outputs = OutputsNodalToElemental(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if collapse_shell_layers !=None: + if collapse_shell_layers is not None: self.inputs.collapse_shell_layers.connect(collapse_shell_layers) @staticmethod def _spec(): - spec = Specification(description="""Transform Nodal field to Elemental field, compute result on a given element scoping.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : 
PinSpecification(name = "mesh_scoping", type_names=["scoping"], optional=True, document=""""""), - 10 : PinSpecification(name = "collapse_shell_layers", type_names=["bool"], optional=True, document="""if true shell layers are averaged as well (default is false)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Transform Nodal field to Elemental field, compute result on a given + element scoping.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=True, + document="""""", + ), + 10: PinSpecification( + name="collapse_shell_layers", + type_names=["bool"], + optional=True, + document="""If true shell layers are averaged as well + (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "nodal_to_elemental") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="nodal_to_elemental", server=server) @property def inputs(self): @@ -74,143 +130,139 @@ def inputs(self): Returns -------- - inputs : InputsNodalToElemental + inputs : InputsNodalToElemental """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNodalToElemental + outputs : OutputsNodalToElemental """ return super().outputs -#internal name: nodal_to_elemental -#scripting name: nodal_to_elemental class InputsNodalToElemental(_Inputs): - """Intermediate class used to connect user inputs to nodal_to_elemental operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.nodal_to_elemental() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_collapse_shell_layers = bool() - >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) + """Intermediate class used to connect user inputs to + nodal_to_elemental operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.nodal_to_elemental() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_collapse_shell_layers = bool() + >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) """ + def __init__(self, op: Operator): super().__init__(nodal_to_elemental._spec().inputs, op) - self._field = Input(nodal_to_elemental._spec().input_pin(0), 0, op, -1) + self._field = Input(nodal_to_elemental._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._mesh_scoping = Input(nodal_to_elemental._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(nodal_to_elemental._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._collapse_shell_layers = Input(nodal_to_elemental._spec().input_pin(10), 10, op, -1) + self._collapse_shell_layers = Input( + nodal_to_elemental._spec().input_pin(10), 10, op, -1 + ) self._inputs.append(self._collapse_shell_layers) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.nodal_to_elemental() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. 
Parameters ---------- - my_mesh_scoping : Scoping, + my_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.nodal_to_elemental() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def collapse_shell_layers(self): - """Allows to connect collapse_shell_layers input to the operator + """Allows to connect collapse_shell_layers input to the operator. - - pindoc: if true shell layers are averaged as well (default is false) + If true shell layers are averaged as well + (default is false) Parameters ---------- - my_collapse_shell_layers : bool, + my_collapse_shell_layers : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.nodal_to_elemental() >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) - >>> #or + >>> # or >>> op.inputs.collapse_shell_layers(my_collapse_shell_layers) - """ return self._collapse_shell_layers + class OutputsNodalToElemental(_Outputs): - """Intermediate class used to get outputs from nodal_to_elemental operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.nodal_to_elemental() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + nodal_to_elemental operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.nodal_to_elemental() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(nodal_to_elemental._spec().outputs, op) - self._field = Output(nodal_to_elemental._spec().output_pin(0), 0, op) + self._field = Output(nodal_to_elemental._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.nodal_to_elemental() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/averaging/nodal_to_elemental_fc.py b/ansys/dpf/core/operators/averaging/nodal_to_elemental_fc.py index 7eefb7b8796..b25362fe906 100644 --- a/ansys/dpf/core/operators/averaging/nodal_to_elemental_fc.py +++ b/ansys/dpf/core/operators/averaging/nodal_to_elemental_fc.py @@ -1,78 +1,157 @@ """ nodal_to_elemental_fc -===================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "averaging" category -""" class nodal_to_elemental_fc(Operator): - """Transform Nodal fields into Elemental fields using an averaging process, result is computed on a given elements scoping. If the input fields are mixed shell/solid and the shells layers are not asked to be collapsed, then the fields are splitted by element shape and the output fields container has elshape label. 
- - available inputs: - - fields_container (FieldsContainer) - - mesh (MeshedRegion, MeshesContainer) (optional) - - scoping (Scoping, ScopingsContainer) (optional) - - collapse_shell_layers (bool) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.averaging.nodal_to_elemental_fc() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - >>> my_collapse_shell_layers = bool() - >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.averaging.nodal_to_elemental_fc(fields_container=my_fields_container,mesh=my_mesh,scoping=my_scoping,collapse_shell_layers=my_collapse_shell_layers) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, mesh=None, scoping=None, collapse_shell_layers=None, config=None, server=None): - super().__init__(name="nodal_to_elemental_fc", config = config, server = server) + """Transform Nodal fields into Elemental fields using an averaging + process, result is computed on a given elements scoping. If the + input fields are mixed shell/solid and the shells layers are not + asked to be collapsed, then the fields are split by element shape + and the output fields container has elshape label. 
+ + Parameters + ---------- + fields_container : FieldsContainer + mesh : MeshedRegion or MeshesContainer, optional + The mesh region in this pin is used to + perform the averaging, if there is no + field's support it is used + scoping : Scoping or ScopingsContainer, optional + Average only on these elements, if it is + scoping container, the label must + correspond to the one of the fields + container + collapse_shell_layers : bool, optional + If true shell layers are averaged as well + (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.averaging.nodal_to_elemental_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_collapse_shell_layers = bool() + >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.averaging.nodal_to_elemental_fc( + ... fields_container=my_fields_container, + ... mesh=my_mesh, + ... scoping=my_scoping, + ... collapse_shell_layers=my_collapse_shell_layers, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + fields_container=None, + mesh=None, + scoping=None, + collapse_shell_layers=None, + config=None, + server=None, + ): + super().__init__(name="nodal_to_elemental_fc", config=config, server=server) self._inputs = InputsNodalToElementalFc(self) self._outputs = OutputsNodalToElementalFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) - if scoping !=None: + if scoping is not None: self.inputs.scoping.connect(scoping) - if collapse_shell_layers !=None: + if collapse_shell_layers is not None: self.inputs.collapse_shell_layers.connect(collapse_shell_layers) @staticmethod def _spec(): - spec = Specification(description="""Transform Nodal fields into Elemental fields using an averaging process, result is computed on a given elements scoping. 
If the input fields are mixed shell/solid and the shells layers are not asked to be collapsed, then the fields are splitted by element shape and the output fields container has elshape label.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""the mesh region in this pin is used to perform the averaging, if there is no field's support it is used"""), - 3 : PinSpecification(name = "scoping", type_names=["scoping","scopings_container"], optional=True, document="""average only on these elements, if it is scoping container, the label must correspond to the one of the fields container"""), - 10 : PinSpecification(name = "collapse_shell_layers", type_names=["bool"], optional=True, document="""if true shell layers are averaged as well (default is false)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Transform Nodal fields into Elemental fields using an averaging + process, result is computed on a given elements scoping. 
+ If the input fields are mixed shell/solid and the shells + layers are not asked to be collapsed, then the fields are + split by element shape and the output fields container has + elshape label.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""The mesh region in this pin is used to + perform the averaging, if there is no + field's support it is used""", + ), + 3: PinSpecification( + name="scoping", + type_names=["scoping", "scopings_container"], + optional=True, + document="""Average only on these elements, if it is + scoping container, the label must + correspond to the one of the fields + container""", + ), + 10: PinSpecification( + name="collapse_shell_layers", + type_names=["bool"], + optional=True, + document="""If true shell layers are averaged as well + (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "nodal_to_elemental_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="nodal_to_elemental_fc", server=server) @property def inputs(self): @@ -80,169 +159,171 @@ def inputs(self): Returns -------- - inputs : InputsNodalToElementalFc + inputs : InputsNodalToElementalFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNodalToElementalFc + outputs : OutputsNodalToElementalFc """ return super().outputs -#internal name: nodal_to_elemental_fc -#scripting name: nodal_to_elemental_fc class InputsNodalToElementalFc(_Inputs): - """Intermediate class used to connect user inputs to nodal_to_elemental_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.nodal_to_elemental_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - >>> my_collapse_shell_layers = bool() - >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) + """Intermediate class used to connect user inputs to + nodal_to_elemental_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.nodal_to_elemental_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_collapse_shell_layers = bool() + >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) """ + def __init__(self, op: Operator): super().__init__(nodal_to_elemental_fc._spec().inputs, op) - self._fields_container = Input(nodal_to_elemental_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + nodal_to_elemental_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._mesh = Input(nodal_to_elemental_fc._spec().input_pin(1), 1, op, -1) + self._mesh = Input(nodal_to_elemental_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh) - self._scoping = Input(nodal_to_elemental_fc._spec().input_pin(3), 3, op, -1) + self._scoping = Input(nodal_to_elemental_fc._spec().input_pin(3), 3, op, -1) self._inputs.append(self._scoping) - self._collapse_shell_layers = Input(nodal_to_elemental_fc._spec().input_pin(10), 10, op, -1) + self._collapse_shell_layers = Input( + nodal_to_elemental_fc._spec().input_pin(10), 10, op, -1 + ) self._inputs.append(self._collapse_shell_layers) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.nodal_to_elemental_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: the mesh region in this pin is used to perform the averaging, if there is no field's support it is used + The mesh region in this pin is used to + perform the averaging, if there is no + field's support it is used Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.nodal_to_elemental_fc() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def scoping(self): - """Allows to connect scoping input to the operator + """Allows to connect scoping input to the operator. - - pindoc: average only on these elements, if it is scoping container, the label must correspond to the one of the fields container + Average only on these elements, if it is + scoping container, the label must + correspond to the one of the fields + container Parameters ---------- - my_scoping : Scoping, ScopingsContainer, + my_scoping : Scoping or ScopingsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.nodal_to_elemental_fc() >>> op.inputs.scoping.connect(my_scoping) - >>> #or + >>> # or >>> op.inputs.scoping(my_scoping) - """ return self._scoping @property def collapse_shell_layers(self): - """Allows to connect collapse_shell_layers input to the operator + """Allows to connect collapse_shell_layers input to the operator. 
- - pindoc: if true shell layers are averaged as well (default is false) + If true shell layers are averaged as well + (default is false) Parameters ---------- - my_collapse_shell_layers : bool, + my_collapse_shell_layers : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.nodal_to_elemental_fc() >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) - >>> #or + >>> # or >>> op.inputs.collapse_shell_layers(my_collapse_shell_layers) - """ return self._collapse_shell_layers + class OutputsNodalToElementalFc(_Outputs): - """Intermediate class used to get outputs from nodal_to_elemental_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.nodal_to_elemental_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + nodal_to_elemental_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.nodal_to_elemental_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(nodal_to_elemental_fc._spec().outputs, op) - self._fields_container = Output(nodal_to_elemental_fc._spec().output_pin(0), 0, op) + self._fields_container = Output( + nodal_to_elemental_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.nodal_to_elemental_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/averaging/to_elemental_fc.py b/ansys/dpf/core/operators/averaging/to_elemental_fc.py index 919d3cbc0be..b5a3fcd02d2 100644 --- a/ansys/dpf/core/operators/averaging/to_elemental_fc.py +++ b/ansys/dpf/core/operators/averaging/to_elemental_fc.py @@ -1,84 +1,158 @@ """ to_elemental_fc =============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "averaging" category -""" class to_elemental_fc(Operator): - """Transform input fields into Elemental fields using an averaging process, result is computed on a given elements scoping. 
- - available inputs: - - fields_container (FieldsContainer) - - mesh (MeshedRegion) (optional) - - mesh_scoping (Scoping) (optional) - - smoothen_values (bool) (optional) - - collapse_shell_layers (bool) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.averaging.to_elemental_fc() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_smoothen_values = bool() - >>> op.inputs.smoothen_values.connect(my_smoothen_values) - >>> my_collapse_shell_layers = bool() - >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.averaging.to_elemental_fc(fields_container=my_fields_container,mesh=my_mesh,mesh_scoping=my_mesh_scoping,smoothen_values=my_smoothen_values,collapse_shell_layers=my_collapse_shell_layers) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, mesh=None, mesh_scoping=None, smoothen_values=None, collapse_shell_layers=None, config=None, server=None): - super().__init__(name="to_elemental_fc", config = config, server = server) + """Transform input fields into Elemental fields using an averaging + process, result is computed on a given elements scoping. 
+ + Parameters + ---------- + fields_container : FieldsContainer + mesh : MeshedRegion, optional + mesh_scoping : Scoping, optional + smoothen_values : bool, optional + If it is set to true, elemental nodal fields + are first averaged on nodes and then + averaged on elements (default is + false) + collapse_shell_layers : bool, optional + If true shell layers are averaged as well + (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.averaging.to_elemental_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_smoothen_values = bool() + >>> op.inputs.smoothen_values.connect(my_smoothen_values) + >>> my_collapse_shell_layers = bool() + >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.averaging.to_elemental_fc( + ... fields_container=my_fields_container, + ... mesh=my_mesh, + ... mesh_scoping=my_mesh_scoping, + ... smoothen_values=my_smoothen_values, + ... collapse_shell_layers=my_collapse_shell_layers, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + fields_container=None, + mesh=None, + mesh_scoping=None, + smoothen_values=None, + collapse_shell_layers=None, + config=None, + server=None, + ): + super().__init__(name="to_elemental_fc", config=config, server=server) self._inputs = InputsToElementalFc(self) self._outputs = OutputsToElementalFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if smoothen_values !=None: + if smoothen_values is not None: self.inputs.smoothen_values.connect(smoothen_values) - if collapse_shell_layers !=None: + if collapse_shell_layers is not None: self.inputs.collapse_shell_layers.connect(collapse_shell_layers) @staticmethod def _spec(): - spec = Specification(description="""Transform input fields into Elemental fields using an averaging process, result is computed on a given elements scoping.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document=""""""), - 3 : PinSpecification(name = "mesh_scoping", type_names=["scoping"], optional=True, document=""""""), - 7 : PinSpecification(name = "smoothen_values", type_names=["bool"], optional=True, document="""if it is set to true, elemental nodal fields are first averaged on nodes and then averaged on elements (default is false)"""), - 10 : PinSpecification(name = "collapse_shell_layers", type_names=["bool"], optional=True, document="""if true shell layers are averaged as well (default is false)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", 
type_names=["fields_container"], optional=False, document="""""")}) + description = """Transform input fields into Elemental fields using an averaging + process, result is computed on a given elements scoping.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + 3: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=True, + document="""""", + ), + 7: PinSpecification( + name="smoothen_values", + type_names=["bool"], + optional=True, + document="""If it is set to true, elemental nodal fields + are first averaged on nodes and then + averaged on elements (default is + false)""", + ), + 10: PinSpecification( + name="collapse_shell_layers", + type_names=["bool"], + optional=True, + document="""If true shell layers are averaged as well + (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "to_elemental_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="to_elemental_fc", server=server) @property def inputs(self): @@ -86,191 +160,185 @@ def inputs(self): Returns -------- - inputs : InputsToElementalFc + inputs : InputsToElementalFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsToElementalFc + outputs : OutputsToElementalFc """ return super().outputs -#internal name: to_elemental_fc -#scripting name: to_elemental_fc class InputsToElementalFc(_Inputs): - """Intermediate class used to connect user inputs to to_elemental_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.to_elemental_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_smoothen_values = bool() - >>> op.inputs.smoothen_values.connect(my_smoothen_values) - >>> my_collapse_shell_layers = bool() - >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) + """Intermediate class used to connect user inputs to + to_elemental_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.to_elemental_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_smoothen_values = bool() + >>> op.inputs.smoothen_values.connect(my_smoothen_values) + >>> my_collapse_shell_layers = bool() + >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) """ + def __init__(self, op: Operator): super().__init__(to_elemental_fc._spec().inputs, op) - self._fields_container = Input(to_elemental_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(to_elemental_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) - self._mesh = Input(to_elemental_fc._spec().input_pin(1), 1, op, -1) + self._mesh = Input(to_elemental_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh) - self._mesh_scoping = Input(to_elemental_fc._spec().input_pin(3), 3, op, -1) + self._mesh_scoping = Input(to_elemental_fc._spec().input_pin(3), 3, op, -1) self._inputs.append(self._mesh_scoping) - self._smoothen_values = Input(to_elemental_fc._spec().input_pin(7), 7, op, -1) + self._smoothen_values = Input(to_elemental_fc._spec().input_pin(7), 7, op, -1) self._inputs.append(self._smoothen_values) - self._collapse_shell_layers = Input(to_elemental_fc._spec().input_pin(10), 10, op, -1) + self._collapse_shell_layers = Input( + to_elemental_fc._spec().input_pin(10), 10, op, -1 + ) self._inputs.append(self._collapse_shell_layers) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.to_elemental_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.to_elemental_fc() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : Scoping, + my_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.to_elemental_fc() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def smoothen_values(self): - """Allows to connect smoothen_values input to the operator + """Allows to connect smoothen_values input to the operator. 
- - pindoc: if it is set to true, elemental nodal fields are first averaged on nodes and then averaged on elements (default is false) + If it is set to true, elemental nodal fields + are first averaged on nodes and then + averaged on elements (default is + false) Parameters ---------- - my_smoothen_values : bool, + my_smoothen_values : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.to_elemental_fc() >>> op.inputs.smoothen_values.connect(my_smoothen_values) - >>> #or + >>> # or >>> op.inputs.smoothen_values(my_smoothen_values) - """ return self._smoothen_values @property def collapse_shell_layers(self): - """Allows to connect collapse_shell_layers input to the operator + """Allows to connect collapse_shell_layers input to the operator. - - pindoc: if true shell layers are averaged as well (default is false) + If true shell layers are averaged as well + (default is false) Parameters ---------- - my_collapse_shell_layers : bool, + my_collapse_shell_layers : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.to_elemental_fc() >>> op.inputs.collapse_shell_layers.connect(my_collapse_shell_layers) - >>> #or + >>> # or >>> op.inputs.collapse_shell_layers(my_collapse_shell_layers) - """ return self._collapse_shell_layers + class OutputsToElementalFc(_Outputs): - """Intermediate class used to get outputs from to_elemental_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.to_elemental_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + to_elemental_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.to_elemental_fc() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(to_elemental_fc._spec().outputs, op) - self._fields_container = Output(to_elemental_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(to_elemental_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.to_elemental_fc() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/averaging/to_nodal.py b/ansys/dpf/core/operators/averaging/to_nodal.py index b3716352727..77181255010 100644 --- a/ansys/dpf/core/operators/averaging/to_nodal.py +++ b/ansys/dpf/core/operators/averaging/to_nodal.py @@ -1,66 +1,106 @@ """ to_nodal -======== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "averaging" category -""" class to_nodal(Operator): - """Transform input field into Nodal field using an averaging process, result is computed on a given node scoping. 
- - available inputs: - - field (Field, FieldsContainer) - - mesh_scoping (Scoping) (optional) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.averaging.to_nodal() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.averaging.to_nodal(field=my_field,mesh_scoping=my_mesh_scoping) + """Transform input field into Nodal field using an averaging process, + result is computed on a given node scoping. + + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + mesh_scoping : Scoping, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.averaging.to_nodal() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.averaging.to_nodal( + ... field=my_field, + ... mesh_scoping=my_mesh_scoping, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, field=None, mesh_scoping=None, config=None, server=None): - super().__init__(name="to_nodal", config = config, server = server) + super().__init__(name="to_nodal", config=config, server=server) self._inputs = InputsToNodal(self) self._outputs = OutputsToNodal(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) @staticmethod def _spec(): - spec = Specification(description="""Transform input field into Nodal field using an averaging process, result is computed on a given node scoping.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scoping"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Transform input field into Nodal field using an averaging process, + result is computed on a given node scoping.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "to_nodal") + def default_config(server=None): + 
"""Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the global server. + """ + return Operator.default_config(name="to_nodal", server=server) @property def inputs(self): @@ -68,117 +108,112 @@ def inputs(self): Returns -------- - inputs : InputsToNodal + inputs : InputsToNodal """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsToNodal + outputs : OutputsToNodal """ return super().outputs -#internal name: to_nodal -#scripting name: to_nodal class InputsToNodal(_Inputs): - """Intermediate class used to connect user inputs to to_nodal operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.to_nodal() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + """Intermediate class used to connect user inputs to + to_nodal operator.
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.to_nodal() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) """ + def __init__(self, op: Operator): super().__init__(to_nodal._spec().inputs, op) - self._field = Input(to_nodal._spec().input_pin(0), 0, op, -1) + self._field = Input(to_nodal._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._mesh_scoping = Input(to_nodal._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(to_nodal._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.to_nodal() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. 
Parameters ---------- - my_mesh_scoping : Scoping, + my_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.to_nodal() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping + class OutputsToNodal(_Outputs): - """Intermediate class used to get outputs from to_nodal operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.to_nodal() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + to_nodal operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.to_nodal() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(to_nodal._spec().outputs, op) - self._field = Output(to_nodal._spec().output_pin(0), 0, op) + self._field = Output(to_nodal._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.to_nodal() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/averaging/to_nodal_fc.py b/ansys/dpf/core/operators/averaging/to_nodal_fc.py index 87ed78a7900..e08c9109f0a 100644 --- a/ansys/dpf/core/operators/averaging/to_nodal_fc.py +++ b/ansys/dpf/core/operators/averaging/to_nodal_fc.py @@ -1,72 +1,122 @@ """ to_nodal_fc -=========== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "averaging" category -""" class to_nodal_fc(Operator): - """Transform input fields into Nodal fields using an averaging process, result is computed on a given node scoping. - - available inputs: - - fields_container (FieldsContainer) - - mesh (MeshedRegion) (optional) - - mesh_scoping (Scoping) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.averaging.to_nodal_fc() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.averaging.to_nodal_fc(fields_container=my_fields_container,mesh=my_mesh,mesh_scoping=my_mesh_scoping) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, mesh=None, mesh_scoping=None, config=None, server=None): - super().__init__(name="to_nodal_fc", config = config, server = server) + """Transform input fields into Nodal fields using an averaging process, + result is computed on a given node scoping. 
+ + Parameters + ---------- + fields_container : FieldsContainer + mesh : MeshedRegion, optional + mesh_scoping : Scoping, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.averaging.to_nodal_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.averaging.to_nodal_fc( + ... fields_container=my_fields_container, + ... mesh=my_mesh, + ... mesh_scoping=my_mesh_scoping, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + fields_container=None, + mesh=None, + mesh_scoping=None, + config=None, + server=None, + ): + super().__init__(name="to_nodal_fc", config=config, server=server) self._inputs = InputsToNodalFc(self) self._outputs = OutputsToNodalFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) @staticmethod def _spec(): - spec = Specification(description="""Transform input fields into Nodal fields using an averaging process, result is computed on a given node scoping.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document=""""""), - 3 : PinSpecification(name = "mesh_scoping", type_names=["scoping"], optional=True, document="""""")}, - 
map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Transform input fields into Nodal fields using an averaging process, + result is computed on a given node scoping.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + 3: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "to_nodal_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the global server.
+ """ + return Operator.default_config(name="to_nodal_fc", server=server) @property def inputs(self): @@ -74,139 +124,131 @@ def inputs(self): Returns -------- - inputs : InputsToNodalFc + inputs : InputsToNodalFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsToNodalFc + outputs : OutputsToNodalFc """ return super().outputs -#internal name: to_nodal_fc -#scripting name: to_nodal_fc class InputsToNodalFc(_Inputs): - """Intermediate class used to connect user inputs to to_nodal_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.to_nodal_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + """Intermediate class used to connect user inputs to + to_nodal_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.to_nodal_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) """ + def __init__(self, op: Operator): super().__init__(to_nodal_fc._spec().inputs, op) - self._fields_container = Input(to_nodal_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(to_nodal_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) - self._mesh = Input(to_nodal_fc._spec().input_pin(1), 1, op, -1) + self._mesh = Input(to_nodal_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh) - self._mesh_scoping = Input(to_nodal_fc._spec().input_pin(3), 3, op, -1) + self._mesh_scoping = Input(to_nodal_fc._spec().input_pin(3), 3, op, -1) self._inputs.append(self._mesh_scoping) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.to_nodal_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.to_nodal_fc() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : Scoping, + my_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.to_nodal_fc() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping + class OutputsToNodalFc(_Outputs): - """Intermediate class used to get outputs from to_nodal_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.averaging.to_nodal_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + to_nodal_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.to_nodal_fc() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(to_nodal_fc._spec().outputs, op) - self._fields_container = Output(to_nodal_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(to_nodal_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.averaging.to_nodal_fc() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/build.py b/ansys/dpf/core/operators/build.py new file mode 100644 index 00000000000..dbc993e5d32 --- /dev/null +++ b/ansys/dpf/core/operators/build.py @@ -0,0 +1,220 @@ +"""Build static source operators from DPF server.""" +import copy +import os +from datetime import datetime +from textwrap import wrap + +import black +import chevron +from ansys.dpf import core as dpf +from ansys.dpf.core import common +from ansys.dpf.core.dpf_operator import available_operator_names +from ansys.dpf.core.outputs import _make_printable_type +from ansys.dpf.core.mapping_types import map_types_to_python + + +def build_docstring(specification): + """Used to generate class docstrings.""" + docstring = "" + if specification.description: + docstring += "\n".join( + wrap(specification.description, subsequent_indent=" ") + ) + docstring += "\n\n" + docstring = docstring.rstrip() + return docstring.replace('"', "'") + + +def map_types(cpp_types): + """Map C++ object types to Python types.""" + types = [] + # These types don't get mapped to Python types + types_to_ignore = ["vector", "umap", "enum"] + 
for cpp_type in cpp_types: + if any(type_name in cpp_type for type_name in types_to_ignore): + continue + else: + types.append(map_types_to_python[cpp_type]) + return types + + +def update_type_names_for_ellipsis(type_names): + # Remove vector and umap types from the Python type + new_types = [] + for name in type_names: + if name == "vector" or name == "vector": + new_types.append(name) + elif "vector" not in name and "umap" not in name: + new_types.append(name) + return new_types + + +def build_pin_data(pins, output=False): + """Build pin data for use within template.""" + pin_ids = [pin for pin in pins] + pin_ids.sort() + + data = [] + for id in pin_ids: + specification = pins[id] + + type_names = specification.type_names + if specification.ellipsis: + type_names = update_type_names_for_ellipsis(type_names) + docstring_types = map_types(type_names) + parameter_types = " or ".join(docstring_types) + parameter_types = "\n".join( + wrap(parameter_types, subsequent_indent=" ", width=60) + ) + + pin_name = specification.name + pin_name = pin_name.replace("<", "_") + pin_name = pin_name.replace(">", "_") + + main_type = docstring_types[0] if len(docstring_types) >= 1 else "" + built_in_types = ("int", "double", "string", "bool", "float", "str") + + # Case where output pin has multiple types. 
+ multiple_types = len(type_names) >= 2 + printable_type_names = type_names + if multiple_types and output: + printable_type_names = [_make_printable_type(name) for name in type_names] + + pin_data = { + "id": id, + "name": pin_name, + "pin_name": pin_name, # Base pin name, without numbers for when pin is ellipsis + "has_types": len(type_names) >= 1, + "multiple_types": multiple_types, + "printable_type_names": printable_type_names, + "types": type_names, + "types_for_docstring": parameter_types, + "main_type": main_type, + "built_in_main_type": main_type in built_in_types, + "optional": specification.optional, + "document": "\n".join( + wrap( + specification.document.capitalize(), + subsequent_indent=" ", + width=45, + ) + ), + "ellipsis": 0 if specification.ellipsis else -1, + } + + if specification.ellipsis: + # Create two pins for ellipsis field with exactly the same + # properties, just different names, ids, and ellipsis values + pin_data["name"] = pin_name + "1" + data.append(pin_data) + + second_pin_data = copy.deepcopy(pin_data) + second_pin_data["name"] = pin_name + "2" + second_pin_data["id"] = id + 1 + second_pin_data["ellipsis"] = 1 + data.append(second_pin_data) + else: + data.append(pin_data) + + return data + + +def build_operator( + specification, operator_name, class_name, capital_class_name, category +): + + input_pins = [] + if specification.inputs: + input_pins = build_pin_data(specification.inputs) + + output_pins = [] + if specification.outputs: + output_pins = build_pin_data(specification.outputs, output=True) + multiple_output_types = any(pin["multiple_types"] for pin in output_pins) + + docstring = build_docstring(specification) + + specification_description = "\n".join( + wrap(specification.description, subsequent_indent=" ") + ) + + date_and_time = datetime.now().strftime("%m/%d/%Y, %H:%M:%S") + + data = { + "operator_name": operator_name, + "class_name": class_name, + "capital_class_name": capital_class_name, + "docstring": docstring, + 
"specification_description": specification_description, + "input_pins": input_pins, + "output_pins": output_pins, + "outputs": len(output_pins) >= 1, + "multiple_output_types": multiple_output_types, + "category": category, + "date_and_time": date_and_time, + } + + this_path = os.path.dirname(os.path.abspath(__file__)) + mustache_file = os.path.join(this_path, "operator.mustache") + with open(mustache_file, "r") as f: + cls = chevron.render(f, data) + + return black.format_str(cls, mode=black.FileMode()) + + +if __name__ == "__main__": + this_path = os.path.dirname(os.path.abspath(__file__)) + + available_operators = available_operator_names() + + succeeded = 0 + for operator_name in available_operators: + specification = dpf.Operator.operator_specification(operator_name) + + category = specification.properties.get("category", "") + if not category: + raise ValueError(f"Category not defined for operator {operator_name}.") + scripting_name = specification.properties.get("scripting_name", "") + + # Make directory for new category + category_path = os.path.join(this_path, category) + if not os.path.exists(category_path): + os.mkdir(category_path) + + # Clean up scripting name + if scripting_name == "": + scripting_name = operator_name + if "::" in scripting_name: + scripting_name = scripting_name.split("::")[-1] + if "."
in scripting_name: + scripting_name = scripting_name.split(".")[-1] + + # Get python class name from scripting name + capital_class_name = common._snake_to_camel_case(scripting_name) + + # Write to operator file + operator_file = os.path.join(category_path, scripting_name + ".py") + with open(operator_file, "w") as f: + try: + operator_str = build_operator( + specification, + operator_name, + scripting_name, + capital_class_name, + category, + ) + exec(operator_str) + f.write(operator_str) + succeeded += 1 + except SyntaxError as e: + error_message = ( + f"Unable to generate {operator_name}, {scripting_name}, {capital_class_name}.\n" + f"Error message: {e}\n" + ) + with open(os.path.join(this_path, "failures.txt"), "w") as error_file: + error_file.write(error_message) + error_file.write(f"Class: {operator_str}") + print(error_message) + + print(f"Generated {succeeded} out of {len(available_operators)}") + dpf.SERVER.shutdown() diff --git a/ansys/dpf/core/operators/filter/field_band_pass.py b/ansys/dpf/core/operators/filter/field_band_pass.py index 4daad15420c..af64bdf913e 100644 --- a/ansys/dpf/core/operators/filter/field_band_pass.py +++ b/ansys/dpf/core/operators/filter/field_band_pass.py @@ -1,72 +1,133 @@ """ field_band_pass =============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "filter" category -""" class field_band_pass(Operator): - """The band pass filter returns all the values strictly superior to the min threshold value and stricly inferior to the max threshold value in input. 
- - available inputs: - - field (Field, FieldsContainer) - - min_threshold (float, Field) - - max_threshold (float, Field) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.filter.field_band_pass() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_min_threshold = float() - >>> op.inputs.min_threshold.connect(my_min_threshold) - >>> my_max_threshold = float() - >>> op.inputs.max_threshold.connect(my_max_threshold) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.filter.field_band_pass(field=my_field,min_threshold=my_min_threshold,max_threshold=my_max_threshold) - - >>> # Get output data - >>> result_field = op.outputs.field()""" - def __init__(self, field=None, min_threshold=None, max_threshold=None, config=None, server=None): - super().__init__(name="core::field::band_pass", config = config, server = server) + """The band pass filter returns all the values strictly superior to the + min threshold value and strictly inferior to the max threshold + value in input. 
+ + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + min_threshold : float or Field + A min threshold scalar or a field containing + one value is expected + max_threshold : float or Field + A max threshold scalar or a field containing + one value is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.filter.field_band_pass() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_min_threshold = float() + >>> op.inputs.min_threshold.connect(my_min_threshold) + >>> my_max_threshold = float() + >>> op.inputs.max_threshold.connect(my_max_threshold) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.filter.field_band_pass( + ... field=my_field, + ... min_threshold=my_min_threshold, + ... max_threshold=my_max_threshold, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__( + self, + field=None, + min_threshold=None, + max_threshold=None, + config=None, + server=None, + ): + super().__init__(name="core::field::band_pass", config=config, server=server) self._inputs = InputsFieldBandPass(self) self._outputs = OutputsFieldBandPass(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if min_threshold !=None: + if min_threshold is not None: self.inputs.min_threshold.connect(min_threshold) - if max_threshold !=None: + if max_threshold is not None: self.inputs.max_threshold.connect(max_threshold) @staticmethod def _spec(): - spec = Specification(description="""The band pass filter returns all the values strictly superior to the min threshold value and stricly inferior to the max threshold value in input.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container 
with only one field is expected"""), - 1 : PinSpecification(name = "min_threshold", type_names=["double","field"], optional=False, document="""a min threshold scalar or a field containing one value is expected"""), - 2 : PinSpecification(name = "max_threshold", type_names=["double","field"], optional=False, document="""a max threshold scalar or a field containing one value is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """The band pass filter returns all the values strictly superior to the + min threshold value and strictly inferior to the max + threshold value in input.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="min_threshold", + type_names=["double", "field"], + optional=False, + document="""A min threshold scalar or a field containing + one value is expected""", + ), + 2: PinSpecification( + name="max_threshold", + type_names=["double", "field"], + optional=False, + document="""A max threshold scalar or a field containing + one value is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "core::field::band_pass") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. 
When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="core::field::band_pass", server=server) @property def inputs(self): @@ -74,145 +135,140 @@ def inputs(self): Returns -------- - inputs : InputsFieldBandPass + inputs : InputsFieldBandPass """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsFieldBandPass + outputs : OutputsFieldBandPass """ return super().outputs -#internal name: core::field::band_pass -#scripting name: field_band_pass class InputsFieldBandPass(_Inputs): - """Intermediate class used to connect user inputs to field_band_pass operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.filter.field_band_pass() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_min_threshold = float() - >>> op.inputs.min_threshold.connect(my_min_threshold) - >>> my_max_threshold = float() - >>> op.inputs.max_threshold.connect(my_max_threshold) + """Intermediate class used to connect user inputs to + field_band_pass operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.field_band_pass() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_min_threshold = float() + >>> op.inputs.min_threshold.connect(my_min_threshold) + >>> my_max_threshold = float() + >>> op.inputs.max_threshold.connect(my_max_threshold) """ + def __init__(self, op: Operator): super().__init__(field_band_pass._spec().inputs, op) - self._field = Input(field_band_pass._spec().input_pin(0), 0, op, -1) + self._field = Input(field_band_pass._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._min_threshold = Input(field_band_pass._spec().input_pin(1), 1, op, -1) + self._min_threshold = Input(field_band_pass._spec().input_pin(1), 1, op, -1) self._inputs.append(self._min_threshold) - self._max_threshold = Input(field_band_pass._spec().input_pin(2), 2, op, -1) + self._max_threshold = Input(field_band_pass._spec().input_pin(2), 2, op, -1) self._inputs.append(self._max_threshold) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.field_band_pass() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def min_threshold(self): - """Allows to connect min_threshold input to the operator + """Allows to connect min_threshold input to the operator. 
- - pindoc: a min threshold scalar or a field containing one value is expected + A min threshold scalar or a field containing + one value is expected Parameters ---------- - my_min_threshold : float, Field, + my_min_threshold : float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.field_band_pass() >>> op.inputs.min_threshold.connect(my_min_threshold) - >>> #or + >>> # or >>> op.inputs.min_threshold(my_min_threshold) - """ return self._min_threshold @property def max_threshold(self): - """Allows to connect max_threshold input to the operator + """Allows to connect max_threshold input to the operator. - - pindoc: a max threshold scalar or a field containing one value is expected + A max threshold scalar or a field containing + one value is expected Parameters ---------- - my_max_threshold : float, Field, + my_max_threshold : float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.field_band_pass() >>> op.inputs.max_threshold.connect(my_max_threshold) - >>> #or + >>> # or >>> op.inputs.max_threshold(my_max_threshold) - """ return self._max_threshold + class OutputsFieldBandPass(_Outputs): - """Intermediate class used to get outputs from field_band_pass operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.filter.field_band_pass() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + field_band_pass operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.field_band_pass() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(field_band_pass._spec().outputs, op) - self._field = Output(field_band_pass._spec().output_pin(0), 0, op) + self._field = Output(field_band_pass._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.field_band_pass() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/filter/field_band_pass_fc.py b/ansys/dpf/core/operators/filter/field_band_pass_fc.py index c7bf9c03608..9677105975e 100644 --- a/ansys/dpf/core/operators/filter/field_band_pass_fc.py +++ b/ansys/dpf/core/operators/filter/field_band_pass_fc.py @@ -1,72 +1,133 @@ """ field_band_pass_fc -================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "filter" category -""" class field_band_pass_fc(Operator): - """The band pass filter returns all the values strictly superior to the min threshold value and stricly inferior to the max threshold value in input. 
- - available inputs: - - fields_container (FieldsContainer) - - min_threshold (float, Field) - - max_threshold (float, Field) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.filter.field_band_pass_fc() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_min_threshold = float() - >>> op.inputs.min_threshold.connect(my_min_threshold) - >>> my_max_threshold = float() - >>> op.inputs.max_threshold.connect(my_max_threshold) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.filter.field_band_pass_fc(fields_container=my_fields_container,min_threshold=my_min_threshold,max_threshold=my_max_threshold) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, min_threshold=None, max_threshold=None, config=None, server=None): - super().__init__(name="core::field::band_pass_fc", config = config, server = server) + """The band pass filter returns all the values strictly superior to the + min threshold value and strictly inferior to the max threshold + value in input. 
+ + Parameters + ---------- + fields_container : FieldsContainer + Field or fields container with only one field + is expected + min_threshold : float or Field + A min threshold scalar or a field containing + one value is expected + max_threshold : float or Field + A max threshold scalar or a field containing + one value is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.filter.field_band_pass_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_min_threshold = float() + >>> op.inputs.min_threshold.connect(my_min_threshold) + >>> my_max_threshold = float() + >>> op.inputs.max_threshold.connect(my_max_threshold) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.filter.field_band_pass_fc( + ... fields_container=my_fields_container, + ... min_threshold=my_min_threshold, + ... max_threshold=my_max_threshold, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + fields_container=None, + min_threshold=None, + max_threshold=None, + config=None, + server=None, + ): + super().__init__(name="core::field::band_pass_fc", config=config, server=server) self._inputs = InputsFieldBandPassFc(self) self._outputs = OutputsFieldBandPassFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if min_threshold !=None: + if min_threshold is not None: self.inputs.min_threshold.connect(min_threshold) - if max_threshold !=None: + if max_threshold is not None: self.inputs.max_threshold.connect(max_threshold) @staticmethod def _spec(): - spec = Specification(description="""The band pass filter returns all the values strictly superior to the min threshold value and stricly inferior to the max threshold value in input.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "min_threshold", type_names=["double","field"], optional=False, document="""a min threshold scalar or a field containing one value is expected"""), - 2 : PinSpecification(name = "max_threshold", type_names=["double","field"], optional=False, document="""a max threshold scalar or a field containing one value is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """The band pass filter returns all the values strictly superior to the + min threshold value and strictly inferior to the max + threshold value in input.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + 
document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="min_threshold", + type_names=["double", "field"], + optional=False, + document="""A min threshold scalar or a field containing + one value is expected""", + ), + 2: PinSpecification( + name="max_threshold", + type_names=["double", "field"], + optional=False, + document="""A max threshold scalar or a field containing + one value is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "core::field::band_pass_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="core::field::band_pass_fc", server=server) @property def inputs(self): @@ -74,145 +135,142 @@ def inputs(self): Returns -------- - inputs : InputsFieldBandPassFc + inputs : InputsFieldBandPassFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsFieldBandPassFc + outputs : OutputsFieldBandPassFc """ return super().outputs -#internal name: core::field::band_pass_fc -#scripting name: field_band_pass_fc class InputsFieldBandPassFc(_Inputs): - """Intermediate class used to connect user inputs to field_band_pass_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.filter.field_band_pass_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_min_threshold = float() - >>> op.inputs.min_threshold.connect(my_min_threshold) - >>> my_max_threshold = float() - >>> op.inputs.max_threshold.connect(my_max_threshold) + """Intermediate class used to connect user inputs to + field_band_pass_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.field_band_pass_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_min_threshold = float() + >>> op.inputs.min_threshold.connect(my_min_threshold) + >>> my_max_threshold = float() + >>> op.inputs.max_threshold.connect(my_max_threshold) """ + def __init__(self, op: Operator): super().__init__(field_band_pass_fc._spec().inputs, op) - self._fields_container = Input(field_band_pass_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + field_band_pass_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._min_threshold = Input(field_band_pass_fc._spec().input_pin(1), 1, op, -1) + self._min_threshold = Input(field_band_pass_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._min_threshold) - self._max_threshold = Input(field_band_pass_fc._spec().input_pin(2), 2, op, -1) + self._max_threshold = Input(field_band_pass_fc._spec().input_pin(2), 2, op, -1) self._inputs.append(self._max_threshold) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.field_band_pass_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def min_threshold(self): - """Allows to connect min_threshold input to the operator + """Allows to connect min_threshold input to the operator. 
- - pindoc: a min threshold scalar or a field containing one value is expected + A min threshold scalar or a field containing + one value is expected Parameters ---------- - my_min_threshold : float, Field, + my_min_threshold : float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.field_band_pass_fc() >>> op.inputs.min_threshold.connect(my_min_threshold) - >>> #or + >>> # or >>> op.inputs.min_threshold(my_min_threshold) - """ return self._min_threshold @property def max_threshold(self): - """Allows to connect max_threshold input to the operator + """Allows to connect max_threshold input to the operator. - - pindoc: a max threshold scalar or a field containing one value is expected + A max threshold scalar or a field containing + one value is expected Parameters ---------- - my_max_threshold : float, Field, + my_max_threshold : float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.field_band_pass_fc() >>> op.inputs.max_threshold.connect(my_max_threshold) - >>> #or + >>> # or >>> op.inputs.max_threshold(my_max_threshold) - """ return self._max_threshold + class OutputsFieldBandPassFc(_Outputs): - """Intermediate class used to get outputs from field_band_pass_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.filter.field_band_pass_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + field_band_pass_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.field_band_pass_fc() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(field_band_pass_fc._spec().outputs, op) - self._fields_container = Output(field_band_pass_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(field_band_pass_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.field_band_pass_fc() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/filter/field_high_pass.py b/ansys/dpf/core/operators/filter/field_high_pass.py index 1570ca1640d..8dce1230e35 100644 --- a/ansys/dpf/core/operators/filter/field_high_pass.py +++ b/ansys/dpf/core/operators/filter/field_high_pass.py @@ -1,66 +1,109 @@ """ field_high_pass =============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "filter" category -""" class field_high_pass(Operator): - """The high pass filter returns all the values strictly superior to the threshold value in input. 
- - available inputs: - - field (Field, FieldsContainer) - - threshold (float, Field) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.filter.field_high_pass() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_threshold = float() - >>> op.inputs.threshold.connect(my_threshold) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.filter.field_high_pass(field=my_field,threshold=my_threshold) + """The high pass filter returns all the values strictly superior to the + threshold value in input. + + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + threshold : float or Field + A threshold scalar or a field containing one + value is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.filter.field_high_pass() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_threshold = float() + >>> op.inputs.threshold.connect(my_threshold) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.filter.field_high_pass( + ... field=my_field, + ... threshold=my_threshold, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, field=None, threshold=None, config=None, server=None): - super().__init__(name="core::field::high_pass", config = config, server = server) + super().__init__(name="core::field::high_pass", config=config, server=server) self._inputs = InputsFieldHighPass(self) self._outputs = OutputsFieldHighPass(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if threshold !=None: + if threshold is not None: self.inputs.threshold.connect(threshold) @staticmethod def _spec(): - spec = Specification(description="""The high pass filter returns all the values strictly superior to the threshold value in input.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "threshold", type_names=["double","field"], optional=False, document="""a threshold scalar or a field containing one value is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """The high pass filter returns all the values strictly superior to the + threshold value in input.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="threshold", + type_names=["double", "field"], + optional=False, + document="""A threshold scalar or a field containing one + value is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - 
@staticmethod - def default_config(): - return Operator.default_config(name = "core::field::high_pass") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="core::field::high_pass", server=server) @property def inputs(self): @@ -68,119 +111,115 @@ def inputs(self): Returns -------- - inputs : InputsFieldHighPass + inputs : InputsFieldHighPass """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsFieldHighPass + outputs : OutputsFieldHighPass """ return super().outputs -#internal name: core::field::high_pass -#scripting name: field_high_pass class InputsFieldHighPass(_Inputs): - """Intermediate class used to connect user inputs to field_high_pass operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.filter.field_high_pass() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_threshold = float() - >>> op.inputs.threshold.connect(my_threshold) + """Intermediate class used to connect user inputs to + field_high_pass operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.field_high_pass() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_threshold = float() + >>> op.inputs.threshold.connect(my_threshold) """ + def __init__(self, op: Operator): super().__init__(field_high_pass._spec().inputs, op) - self._field = Input(field_high_pass._spec().input_pin(0), 0, op, -1) + self._field = Input(field_high_pass._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._threshold = Input(field_high_pass._spec().input_pin(1), 1, op, -1) + self._threshold = Input(field_high_pass._spec().input_pin(1), 1, op, -1) self._inputs.append(self._threshold) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.field_high_pass() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def threshold(self): - """Allows to connect threshold input to the operator + """Allows to connect threshold input to the operator. 
- - pindoc: a threshold scalar or a field containing one value is expected + A threshold scalar or a field containing one + value is expected Parameters ---------- - my_threshold : float, Field, + my_threshold : float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.field_high_pass() >>> op.inputs.threshold.connect(my_threshold) - >>> #or + >>> # or >>> op.inputs.threshold(my_threshold) - """ return self._threshold + class OutputsFieldHighPass(_Outputs): - """Intermediate class used to get outputs from field_high_pass operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.filter.field_high_pass() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + field_high_pass operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.field_high_pass() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(field_high_pass._spec().outputs, op) - self._field = Output(field_high_pass._spec().output_pin(0), 0, op) + self._field = Output(field_high_pass._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.field_high_pass() >>> # Connect inputs : op.inputs. ... 
- >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/filter/field_high_pass_fc.py b/ansys/dpf/core/operators/filter/field_high_pass_fc.py index a4e7fb6deea..dfa86319a12 100644 --- a/ansys/dpf/core/operators/filter/field_high_pass_fc.py +++ b/ansys/dpf/core/operators/filter/field_high_pass_fc.py @@ -1,66 +1,109 @@ """ field_high_pass_fc -================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "filter" category -""" class field_high_pass_fc(Operator): - """The high pass filter returns all the values strictly superior to the threshold value in input. - - available inputs: - - fields_container (FieldsContainer) - - threshold (float, Field) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.filter.field_high_pass_fc() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_threshold = float() - >>> op.inputs.threshold.connect(my_threshold) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.filter.field_high_pass_fc(fields_container=my_fields_container,threshold=my_threshold) + """The high pass filter returns all the values strictly superior to the + threshold value in input. 
+ + Parameters + ---------- + fields_container : FieldsContainer + Field or fields container with only one field + is expected + threshold : float or Field + A threshold scalar or a field containing one + value is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.filter.field_high_pass_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_threshold = float() + >>> op.inputs.threshold.connect(my_threshold) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.filter.field_high_pass_fc( + ... fields_container=my_fields_container, + ... threshold=my_threshold, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, threshold=None, config=None, server=None): - super().__init__(name="core::field::high_pass_fc", config = config, server = server) + super().__init__(name="core::field::high_pass_fc", config=config, server=server) self._inputs = InputsFieldHighPassFc(self) self._outputs = OutputsFieldHighPassFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if threshold !=None: + if threshold is not None: self.inputs.threshold.connect(threshold) @staticmethod def _spec(): - spec = Specification(description="""The high pass filter returns all the values strictly superior to the threshold value in input.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "threshold", type_names=["double","field"], optional=False, document="""a 
threshold scalar or a field containing one value is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """The high pass filter returns all the values strictly superior to the + threshold value in input.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="threshold", + type_names=["double", "field"], + optional=False, + document="""A threshold scalar or a field containing one + value is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "core::field::high_pass_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="core::field::high_pass_fc", server=server) @property def inputs(self): @@ -68,119 +111,117 @@ def inputs(self): Returns -------- - inputs : InputsFieldHighPassFc + inputs : InputsFieldHighPassFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsFieldHighPassFc + outputs : OutputsFieldHighPassFc """ return super().outputs -#internal name: core::field::high_pass_fc -#scripting name: field_high_pass_fc class InputsFieldHighPassFc(_Inputs): - """Intermediate class used to connect user inputs to field_high_pass_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.filter.field_high_pass_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_threshold = float() - >>> op.inputs.threshold.connect(my_threshold) + """Intermediate class used to connect user inputs to + field_high_pass_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.field_high_pass_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_threshold = float() + >>> op.inputs.threshold.connect(my_threshold) """ + def __init__(self, op: Operator): super().__init__(field_high_pass_fc._spec().inputs, op) - self._fields_container = Input(field_high_pass_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + field_high_pass_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._threshold = Input(field_high_pass_fc._spec().input_pin(1), 1, op, -1) + self._threshold = Input(field_high_pass_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._threshold) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.field_high_pass_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def threshold(self): - """Allows to connect threshold input to the operator + """Allows to connect threshold input to the operator. 
- - pindoc: a threshold scalar or a field containing one value is expected + A threshold scalar or a field containing one + value is expected Parameters ---------- - my_threshold : float, Field, + my_threshold : float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.field_high_pass_fc() >>> op.inputs.threshold.connect(my_threshold) - >>> #or + >>> # or >>> op.inputs.threshold(my_threshold) - """ return self._threshold + class OutputsFieldHighPassFc(_Outputs): - """Intermediate class used to get outputs from field_high_pass_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.filter.field_high_pass_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + field_high_pass_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.field_high_pass_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(field_high_pass_fc._spec().outputs, op) - self._fields_container = Output(field_high_pass_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(field_high_pass_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.field_high_pass_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/filter/field_low_pass.py b/ansys/dpf/core/operators/filter/field_low_pass.py index 9717de28bb5..7ec00e3b374 100644 --- a/ansys/dpf/core/operators/filter/field_low_pass.py +++ b/ansys/dpf/core/operators/filter/field_low_pass.py @@ -1,66 +1,109 @@ """ field_low_pass -============== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "filter" category -""" class field_low_pass(Operator): - """The low pass filter returns all the values strictly inferior to the threshold value in input. - - available inputs: - - field (Field, FieldsContainer) - - threshold (float, Field) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.filter.field_low_pass() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_threshold = float() - >>> op.inputs.threshold.connect(my_threshold) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.filter.field_low_pass(field=my_field,threshold=my_threshold) + """The low pass filter returns all the values strictly inferior to the + threshold value in input. 
+ + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + threshold : float or Field + A threshold scalar or a field containing one + value is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.filter.field_low_pass() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_threshold = float() + >>> op.inputs.threshold.connect(my_threshold) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.filter.field_low_pass( + ... field=my_field, + ... threshold=my_threshold, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, field=None, threshold=None, config=None, server=None): - super().__init__(name="core::field::low_pass", config = config, server = server) + super().__init__(name="core::field::low_pass", config=config, server=server) self._inputs = InputsFieldLowPass(self) self._outputs = OutputsFieldLowPass(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if threshold !=None: + if threshold is not None: self.inputs.threshold.connect(threshold) @staticmethod def _spec(): - spec = Specification(description="""The low pass filter returns all the values strictly inferior to the threshold value in input.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "threshold", type_names=["double","field"], optional=False, document="""a threshold scalar or a field containing one value is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + 
description = """The low pass filter returns all the values strictly inferior to the + threshold value in input.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="threshold", + type_names=["double", "field"], + optional=False, + document="""A threshold scalar or a field containing one + value is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "core::field::low_pass") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="core::field::low_pass", server=server) @property def inputs(self): @@ -68,119 +111,115 @@ def inputs(self): Returns -------- - inputs : InputsFieldLowPass + inputs : InputsFieldLowPass """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsFieldLowPass + outputs : OutputsFieldLowPass """ return super().outputs -#internal name: core::field::low_pass -#scripting name: field_low_pass class InputsFieldLowPass(_Inputs): - """Intermediate class used to connect user inputs to field_low_pass operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.filter.field_low_pass() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_threshold = float() - >>> op.inputs.threshold.connect(my_threshold) + """Intermediate class used to connect user inputs to + field_low_pass operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.field_low_pass() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_threshold = float() + >>> op.inputs.threshold.connect(my_threshold) """ + def __init__(self, op: Operator): super().__init__(field_low_pass._spec().inputs, op) - self._field = Input(field_low_pass._spec().input_pin(0), 0, op, -1) + self._field = Input(field_low_pass._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._threshold = Input(field_low_pass._spec().input_pin(1), 1, op, -1) + self._threshold = Input(field_low_pass._spec().input_pin(1), 1, op, -1) self._inputs.append(self._threshold) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.field_low_pass() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def threshold(self): - """Allows to connect threshold input to the operator + """Allows to connect threshold input to the operator. - - pindoc: a threshold scalar or a field containing one value is expected + A threshold scalar or a field containing one + value is expected Parameters ---------- - my_threshold : float, Field, + my_threshold : float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.field_low_pass() >>> op.inputs.threshold.connect(my_threshold) - >>> #or + >>> # or >>> op.inputs.threshold(my_threshold) - """ return self._threshold + class OutputsFieldLowPass(_Outputs): - """Intermediate class used to get outputs from field_low_pass operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.filter.field_low_pass() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + field_low_pass operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.field_low_pass() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(field_low_pass._spec().outputs, op) - self._field = Output(field_low_pass._spec().output_pin(0), 0, op) + self._field = Output(field_low_pass._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.field_low_pass() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/filter/field_low_pass_fc.py b/ansys/dpf/core/operators/filter/field_low_pass_fc.py index 5cfdf6fa079..76002561682 100644 --- a/ansys/dpf/core/operators/filter/field_low_pass_fc.py +++ b/ansys/dpf/core/operators/filter/field_low_pass_fc.py @@ -1,66 +1,109 @@ """ field_low_pass_fc -================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "filter" category -""" class field_low_pass_fc(Operator): - """The low pass filter returns all the values strictly inferior to the threshold value in input. 
- - available inputs: - - fields_container (FieldsContainer) - - threshold (float, Field) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.filter.field_low_pass_fc() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_threshold = float() - >>> op.inputs.threshold.connect(my_threshold) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.filter.field_low_pass_fc(fields_container=my_fields_container,threshold=my_threshold) + """The low pass filter returns all the values strictly inferior to the + threshold value in input. + + Parameters + ---------- + fields_container : FieldsContainer + Field or fields container with only one field + is expected + threshold : float or Field + A threshold scalar or a field containing one + value is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.filter.field_low_pass_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_threshold = float() + >>> op.inputs.threshold.connect(my_threshold) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.filter.field_low_pass_fc( + ... fields_container=my_fields_container, + ... threshold=my_threshold, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, threshold=None, config=None, server=None): - super().__init__(name="core::field::low_pass_fc", config = config, server = server) + super().__init__(name="core::field::low_pass_fc", config=config, server=server) self._inputs = InputsFieldLowPassFc(self) self._outputs = OutputsFieldLowPassFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if threshold !=None: + if threshold is not None: self.inputs.threshold.connect(threshold) @staticmethod def _spec(): - spec = Specification(description="""The low pass filter returns all the values strictly inferior to the threshold value in input.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "threshold", type_names=["double","field"], optional=False, document="""a threshold scalar or a field containing one value is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """The low pass filter returns all the values strictly inferior to the + threshold value in input.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="threshold", + type_names=["double", "field"], + optional=False, + document="""A threshold scalar or a field containing one + value is expected""", + ), + }, + map_output_pin_spec={ 
+ 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "core::field::low_pass_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="core::field::low_pass_fc", server=server) @property def inputs(self): @@ -68,119 +111,117 @@ def inputs(self): Returns -------- - inputs : InputsFieldLowPassFc + inputs : InputsFieldLowPassFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsFieldLowPassFc + outputs : OutputsFieldLowPassFc """ return super().outputs -#internal name: core::field::low_pass_fc -#scripting name: field_low_pass_fc class InputsFieldLowPassFc(_Inputs): - """Intermediate class used to connect user inputs to field_low_pass_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.filter.field_low_pass_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_threshold = float() - >>> op.inputs.threshold.connect(my_threshold) + """Intermediate class used to connect user inputs to + field_low_pass_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.field_low_pass_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_threshold = float() + >>> op.inputs.threshold.connect(my_threshold) """ + def __init__(self, op: Operator): super().__init__(field_low_pass_fc._spec().inputs, op) - self._fields_container = Input(field_low_pass_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + field_low_pass_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._threshold = Input(field_low_pass_fc._spec().input_pin(1), 1, op, -1) + self._threshold = Input(field_low_pass_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._threshold) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.field_low_pass_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def threshold(self): - """Allows to connect threshold input to the operator + """Allows to connect threshold input to the operator. 
- - pindoc: a threshold scalar or a field containing one value is expected + A threshold scalar or a field containing one + value is expected Parameters ---------- - my_threshold : float, Field, + my_threshold : float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.field_low_pass_fc() >>> op.inputs.threshold.connect(my_threshold) - >>> #or + >>> # or >>> op.inputs.threshold(my_threshold) - """ return self._threshold + class OutputsFieldLowPassFc(_Outputs): - """Intermediate class used to get outputs from field_low_pass_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.filter.field_low_pass_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + field_low_pass_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.field_low_pass_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(field_low_pass_fc._spec().outputs, op) - self._fields_container = Output(field_low_pass_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(field_low_pass_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.field_low_pass_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/filter/scoping_band_pass.py b/ansys/dpf/core/operators/filter/scoping_band_pass.py index ad8761aa304..6600d9030a2 100644 --- a/ansys/dpf/core/operators/filter/scoping_band_pass.py +++ b/ansys/dpf/core/operators/filter/scoping_band_pass.py @@ -1,72 +1,133 @@ """ scoping_band_pass -================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "filter" category -""" class scoping_band_pass(Operator): - """The band pass filter returns all the values strictly superior to the min threshold value and stricly inferior to the max threshold value in input. 
- - available inputs: - - field (Field, FieldsContainer) - - min_threshold (float, Field) - - max_threshold (float, Field) - - available outputs: - - scoping (Scoping) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.filter.scoping_band_pass() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_min_threshold = float() - >>> op.inputs.min_threshold.connect(my_min_threshold) - >>> my_max_threshold = float() - >>> op.inputs.max_threshold.connect(my_max_threshold) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.filter.scoping_band_pass(field=my_field,min_threshold=my_min_threshold,max_threshold=my_max_threshold) - - >>> # Get output data - >>> result_scoping = op.outputs.scoping()""" - def __init__(self, field=None, min_threshold=None, max_threshold=None, config=None, server=None): - super().__init__(name="core::scoping::band_pass", config = config, server = server) + """The band pass filter returns all the values strictly superior to the + min threshold value and strictly inferior to the max threshold + value in input. 
+ + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + min_threshold : float or Field + A min threshold scalar or a field containing + one value is expected + max_threshold : float or Field + A max threshold scalar or a field containing + one value is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.filter.scoping_band_pass() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_min_threshold = float() + >>> op.inputs.min_threshold.connect(my_min_threshold) + >>> my_max_threshold = float() + >>> op.inputs.max_threshold.connect(my_max_threshold) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.filter.scoping_band_pass( + ... field=my_field, + ... min_threshold=my_min_threshold, + ... max_threshold=my_max_threshold, + ... ) + + >>> # Get output data + >>> result_scoping = op.outputs.scoping() + """ + + def __init__( + self, + field=None, + min_threshold=None, + max_threshold=None, + config=None, + server=None, + ): + super().__init__(name="core::scoping::band_pass", config=config, server=server) self._inputs = InputsScopingBandPass(self) self._outputs = OutputsScopingBandPass(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if min_threshold !=None: + if min_threshold is not None: self.inputs.min_threshold.connect(min_threshold) - if max_threshold !=None: + if max_threshold is not None: self.inputs.max_threshold.connect(max_threshold) @staticmethod def _spec(): - spec = Specification(description="""The band pass filter returns all the values strictly superior to the min threshold value and stricly inferior to the max threshold value in input.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or 
fields container with only one field is expected"""), - 1 : PinSpecification(name = "min_threshold", type_names=["double","field"], optional=False, document="""a min threshold scalar or a field containing one value is expected"""), - 2 : PinSpecification(name = "max_threshold", type_names=["double","field"], optional=False, document="""a max threshold scalar or a field containing one value is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "scoping", type_names=["scoping"], optional=False, document="""""")}) + description = """The band pass filter returns all the values strictly superior to the + min threshold value and strictly inferior to the max + threshold value in input.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="min_threshold", + type_names=["double", "field"], + optional=False, + document="""A min threshold scalar or a field containing + one value is expected""", + ), + 2: PinSpecification( + name="max_threshold", + type_names=["double", "field"], + optional=False, + document="""A max threshold scalar or a field containing + one value is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "core::scoping::band_pass") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="core::scoping::band_pass", server=server) @property def inputs(self): @@ -74,145 +135,140 @@ def inputs(self): Returns -------- - inputs : InputsScopingBandPass + inputs : InputsScopingBandPass """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsScopingBandPass + outputs : OutputsScopingBandPass """ return super().outputs -#internal name: core::scoping::band_pass -#scripting name: scoping_band_pass class InputsScopingBandPass(_Inputs): - """Intermediate class used to connect user inputs to scoping_band_pass operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.filter.scoping_band_pass() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_min_threshold = float() - >>> op.inputs.min_threshold.connect(my_min_threshold) - >>> my_max_threshold = float() - >>> op.inputs.max_threshold.connect(my_max_threshold) + """Intermediate class used to connect user inputs to + scoping_band_pass operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.scoping_band_pass() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_min_threshold = float() + >>> op.inputs.min_threshold.connect(my_min_threshold) + >>> my_max_threshold = float() + >>> op.inputs.max_threshold.connect(my_max_threshold) """ + def __init__(self, op: Operator): super().__init__(scoping_band_pass._spec().inputs, op) - self._field = Input(scoping_band_pass._spec().input_pin(0), 0, op, -1) + self._field = Input(scoping_band_pass._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._min_threshold = Input(scoping_band_pass._spec().input_pin(1), 1, op, -1) + self._min_threshold = Input(scoping_band_pass._spec().input_pin(1), 1, op, -1) self._inputs.append(self._min_threshold) - self._max_threshold = Input(scoping_band_pass._spec().input_pin(2), 2, op, -1) + self._max_threshold = Input(scoping_band_pass._spec().input_pin(2), 2, op, -1) self._inputs.append(self._max_threshold) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.scoping_band_pass() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def min_threshold(self): - """Allows to connect min_threshold input to the operator + """Allows to connect min_threshold input to the operator. 
- - pindoc: a min threshold scalar or a field containing one value is expected + A min threshold scalar or a field containing + one value is expected Parameters ---------- - my_min_threshold : float, Field, + my_min_threshold : float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.scoping_band_pass() >>> op.inputs.min_threshold.connect(my_min_threshold) - >>> #or + >>> # or >>> op.inputs.min_threshold(my_min_threshold) - """ return self._min_threshold @property def max_threshold(self): - """Allows to connect max_threshold input to the operator + """Allows to connect max_threshold input to the operator. - - pindoc: a max threshold scalar or a field containing one value is expected + A max threshold scalar or a field containing + one value is expected Parameters ---------- - my_max_threshold : float, Field, + my_max_threshold : float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.scoping_band_pass() >>> op.inputs.max_threshold.connect(my_max_threshold) - >>> #or + >>> # or >>> op.inputs.max_threshold(my_max_threshold) - """ return self._max_threshold + class OutputsScopingBandPass(_Outputs): - """Intermediate class used to get outputs from scoping_band_pass operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.filter.scoping_band_pass() - >>> # Connect inputs : op.inputs. ... - >>> result_scoping = op.outputs.scoping() + """Intermediate class used to get outputs from + scoping_band_pass operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.scoping_band_pass() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_scoping = op.outputs.scoping() """ + def __init__(self, op: Operator): super().__init__(scoping_band_pass._spec().outputs, op) - self._scoping = Output(scoping_band_pass._spec().output_pin(0), 0, op) + self._scoping = Output(scoping_band_pass._spec().output_pin(0), 0, op) self._outputs.append(self._scoping) @property def scoping(self): """Allows to get scoping output of the operator - Returns ---------- - my_scoping : Scoping, + my_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.scoping_band_pass() >>> # Connect inputs : op.inputs. ... - >>> result_scoping = op.outputs.scoping() - """ + >>> result_scoping = op.outputs.scoping() + """ # noqa: E501 return self._scoping - diff --git a/ansys/dpf/core/operators/filter/scoping_high_pass.py b/ansys/dpf/core/operators/filter/scoping_high_pass.py index 3eddc5e660f..f7915a77df4 100644 --- a/ansys/dpf/core/operators/filter/scoping_high_pass.py +++ b/ansys/dpf/core/operators/filter/scoping_high_pass.py @@ -1,66 +1,109 @@ """ scoping_high_pass -================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "filter" category -""" class scoping_high_pass(Operator): - """The high pass filter returns all the values strictly superior to the threshold value in input. 
- - available inputs: - - field (Field, FieldsContainer) - - threshold (float, Field) - - available outputs: - - scoping (Scoping) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.filter.scoping_high_pass() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_threshold = float() - >>> op.inputs.threshold.connect(my_threshold) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.filter.scoping_high_pass(field=my_field,threshold=my_threshold) + """The high pass filter returns all the values strictly superior to the + threshold value in input. + + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + threshold : float or Field + A threshold scalar or a field containing one + value is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.filter.scoping_high_pass() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_threshold = float() + >>> op.inputs.threshold.connect(my_threshold) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.filter.scoping_high_pass( + ... field=my_field, + ... threshold=my_threshold, + ... 
) + + >>> # Get output data + >>> result_scoping = op.outputs.scoping() + """ - >>> # Get output data - >>> result_scoping = op.outputs.scoping()""" def __init__(self, field=None, threshold=None, config=None, server=None): - super().__init__(name="core::scoping::high_pass", config = config, server = server) + super().__init__(name="core::scoping::high_pass", config=config, server=server) self._inputs = InputsScopingHighPass(self) self._outputs = OutputsScopingHighPass(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if threshold !=None: + if threshold is not None: self.inputs.threshold.connect(threshold) @staticmethod def _spec(): - spec = Specification(description="""The high pass filter returns all the values strictly superior to the threshold value in input.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "threshold", type_names=["double","field"], optional=False, document="""a threshold scalar or a field containing one value is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "scoping", type_names=["scoping"], optional=False, document="""""")}) + description = """The high pass filter returns all the values strictly superior to the + threshold value in input.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="threshold", + type_names=["double", "field"], + optional=False, + document="""A threshold scalar or a field containing one + value is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=False, + document="""""", + ), + 
}, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "core::scoping::high_pass") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="core::scoping::high_pass", server=server) @property def inputs(self): @@ -68,119 +111,115 @@ def inputs(self): Returns -------- - inputs : InputsScopingHighPass + inputs : InputsScopingHighPass """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsScopingHighPass + outputs : OutputsScopingHighPass """ return super().outputs -#internal name: core::scoping::high_pass -#scripting name: scoping_high_pass class InputsScopingHighPass(_Inputs): - """Intermediate class used to connect user inputs to scoping_high_pass operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.filter.scoping_high_pass() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_threshold = float() - >>> op.inputs.threshold.connect(my_threshold) + """Intermediate class used to connect user inputs to + scoping_high_pass operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.scoping_high_pass() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_threshold = float() + >>> op.inputs.threshold.connect(my_threshold) """ + def __init__(self, op: Operator): super().__init__(scoping_high_pass._spec().inputs, op) - self._field = Input(scoping_high_pass._spec().input_pin(0), 0, op, -1) + self._field = Input(scoping_high_pass._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._threshold = Input(scoping_high_pass._spec().input_pin(1), 1, op, -1) + self._threshold = Input(scoping_high_pass._spec().input_pin(1), 1, op, -1) self._inputs.append(self._threshold) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.scoping_high_pass() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def threshold(self): - """Allows to connect threshold input to the operator + """Allows to connect threshold input to the operator. 
- - pindoc: a threshold scalar or a field containing one value is expected + A threshold scalar or a field containing one + value is expected Parameters ---------- - my_threshold : float, Field, + my_threshold : float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.scoping_high_pass() >>> op.inputs.threshold.connect(my_threshold) - >>> #or + >>> # or >>> op.inputs.threshold(my_threshold) - """ return self._threshold + class OutputsScopingHighPass(_Outputs): - """Intermediate class used to get outputs from scoping_high_pass operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.filter.scoping_high_pass() - >>> # Connect inputs : op.inputs. ... - >>> result_scoping = op.outputs.scoping() + """Intermediate class used to get outputs from + scoping_high_pass operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.scoping_high_pass() + >>> # Connect inputs : op.inputs. ... + >>> result_scoping = op.outputs.scoping() """ + def __init__(self, op: Operator): super().__init__(scoping_high_pass._spec().outputs, op) - self._scoping = Output(scoping_high_pass._spec().output_pin(0), 0, op) + self._scoping = Output(scoping_high_pass._spec().output_pin(0), 0, op) self._outputs.append(self._scoping) @property def scoping(self): """Allows to get scoping output of the operator - Returns ---------- - my_scoping : Scoping, + my_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.scoping_high_pass() >>> # Connect inputs : op.inputs. ... 
- >>> result_scoping = op.outputs.scoping() - """ + >>> result_scoping = op.outputs.scoping() + """ # noqa: E501 return self._scoping - diff --git a/ansys/dpf/core/operators/filter/scoping_low_pass.py b/ansys/dpf/core/operators/filter/scoping_low_pass.py index fe1e2b4f49c..432d7bec7ae 100644 --- a/ansys/dpf/core/operators/filter/scoping_low_pass.py +++ b/ansys/dpf/core/operators/filter/scoping_low_pass.py @@ -1,66 +1,109 @@ """ scoping_low_pass -================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "filter" category -""" class scoping_low_pass(Operator): - """The low pass filter returns all the values strictly inferior to the threshold value in input. - - available inputs: - - field (Field, FieldsContainer) - - threshold (float, Field) - - available outputs: - - scoping (Scoping) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.filter.scoping_low_pass() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_threshold = float() - >>> op.inputs.threshold.connect(my_threshold) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.filter.scoping_low_pass(field=my_field,threshold=my_threshold) + """The low pass filter returns all the values strictly inferior to the + threshold value in input. 
+ + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + threshold : float or Field + A threshold scalar or a field containing one + value is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.filter.scoping_low_pass() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_threshold = float() + >>> op.inputs.threshold.connect(my_threshold) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.filter.scoping_low_pass( + ... field=my_field, + ... threshold=my_threshold, + ... ) + + >>> # Get output data + >>> result_scoping = op.outputs.scoping() + """ - >>> # Get output data - >>> result_scoping = op.outputs.scoping()""" def __init__(self, field=None, threshold=None, config=None, server=None): - super().__init__(name="core::scoping::low_pass", config = config, server = server) + super().__init__(name="core::scoping::low_pass", config=config, server=server) self._inputs = InputsScopingLowPass(self) self._outputs = OutputsScopingLowPass(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if threshold !=None: + if threshold is not None: self.inputs.threshold.connect(threshold) @staticmethod def _spec(): - spec = Specification(description="""The low pass filter returns all the values strictly inferior to the threshold value in input.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "threshold", type_names=["double","field"], optional=False, document="""a threshold scalar or a field containing one value is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "scoping", type_names=["scoping"], optional=False, 
document="""""")}) + description = """The low pass filter returns all the values strictly inferior to the + threshold value in input.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="threshold", + type_names=["double", "field"], + optional=False, + document="""A threshold scalar or a field containing one + value is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "core::scoping::low_pass") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="core::scoping::low_pass", server=server) @property def inputs(self): @@ -68,119 +111,115 @@ def inputs(self): Returns -------- - inputs : InputsScopingLowPass + inputs : InputsScopingLowPass """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsScopingLowPass + outputs : OutputsScopingLowPass """ return super().outputs -#internal name: core::scoping::low_pass -#scripting name: scoping_low_pass class InputsScopingLowPass(_Inputs): - """Intermediate class used to connect user inputs to scoping_low_pass operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.filter.scoping_low_pass() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_threshold = float() - >>> op.inputs.threshold.connect(my_threshold) + """Intermediate class used to connect user inputs to + scoping_low_pass operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.scoping_low_pass() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_threshold = float() + >>> op.inputs.threshold.connect(my_threshold) """ + def __init__(self, op: Operator): super().__init__(scoping_low_pass._spec().inputs, op) - self._field = Input(scoping_low_pass._spec().input_pin(0), 0, op, -1) + self._field = Input(scoping_low_pass._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._threshold = Input(scoping_low_pass._spec().input_pin(1), 1, op, -1) + self._threshold = Input(scoping_low_pass._spec().input_pin(1), 1, op, -1) self._inputs.append(self._threshold) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.scoping_low_pass() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def threshold(self): - """Allows to connect threshold input to the operator + """Allows to connect threshold input to the operator. - - pindoc: a threshold scalar or a field containing one value is expected + A threshold scalar or a field containing one + value is expected Parameters ---------- - my_threshold : float, Field, + my_threshold : float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.scoping_low_pass() >>> op.inputs.threshold.connect(my_threshold) - >>> #or + >>> # or >>> op.inputs.threshold(my_threshold) - """ return self._threshold + class OutputsScopingLowPass(_Outputs): - """Intermediate class used to get outputs from scoping_low_pass operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.filter.scoping_low_pass() - >>> # Connect inputs : op.inputs. ... - >>> result_scoping = op.outputs.scoping() + """Intermediate class used to get outputs from + scoping_low_pass operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.scoping_low_pass() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_scoping = op.outputs.scoping() """ + def __init__(self, op: Operator): super().__init__(scoping_low_pass._spec().outputs, op) - self._scoping = Output(scoping_low_pass._spec().output_pin(0), 0, op) + self._scoping = Output(scoping_low_pass._spec().output_pin(0), 0, op) self._outputs.append(self._scoping) @property def scoping(self): """Allows to get scoping output of the operator - Returns ---------- - my_scoping : Scoping, + my_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.filter.scoping_low_pass() >>> # Connect inputs : op.inputs. ... - >>> result_scoping = op.outputs.scoping() - """ + >>> result_scoping = op.outputs.scoping() + """ # noqa: E501 return self._scoping - diff --git a/ansys/dpf/core/operators/geo/center_of_gravity.py b/ansys/dpf/core/operators/geo/center_of_gravity.py index a3947df8432..0bec1d15dca 100644 --- a/ansys/dpf/core/operators/geo/center_of_gravity.py +++ b/ansys/dpf/core/operators/geo/center_of_gravity.py @@ -1,75 +1,132 @@ """ center_of_gravity -================= +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "geo" category -""" class center_of_gravity(Operator): """Compute the center of gravity of a set of elements - available inputs: - - mesh (MeshedRegion) (optional) - - mesh_scoping (Scoping) (optional) - - field (Field) (optional) - - available outputs: - - field (Field) - - mesh (MeshedRegion) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.geo.center_of_gravity() - - >>> # Make input connections - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.geo.center_of_gravity(mesh=my_mesh,mesh_scoping=my_mesh_scoping,field=my_field) - - >>> # Get output data - >>> result_field = op.outputs.field() - >>> result_mesh = op.outputs.mesh()""" - def __init__(self, mesh=None, mesh_scoping=None, field=None, config=None, server=None): - super().__init__(name="topology::center_of_gravity", config = config, server = server) + Parameters + ---------- + mesh : MeshedRegion, optional + mesh_scoping : Scoping, optional + Mesh scoping, if not set, all the elements of + the mesh are considered. + field : Field, optional + Elemental or nodal ponderation used in + computation. 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.geo.center_of_gravity() + + >>> # Make input connections + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.geo.center_of_gravity( + ... mesh=my_mesh, + ... mesh_scoping=my_mesh_scoping, + ... field=my_field, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + >>> result_mesh = op.outputs.mesh() + """ + + def __init__( + self, mesh=None, mesh_scoping=None, field=None, config=None, server=None + ): + super().__init__( + name="topology::center_of_gravity", config=config, server=server + ) self._inputs = InputsCenterOfGravity(self) self._outputs = OutputsCenterOfGravity(self) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if field !=None: + if field is not None: self.inputs.field.connect(field) @staticmethod def _spec(): - spec = Specification(description="""Compute the center of gravity of a set of elements""", - map_input_pin_spec={ - 0 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scoping"], optional=True, document="""Mesh scoping, if not set, all the elements of the mesh are considered."""), - 2 : PinSpecification(name = "field", type_names=["field"], optional=True, document="""Elemental or nodal ponderation used in computation.""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document=""""""), - 1 : PinSpecification(name = "mesh", 
type_names=["abstract_meshed_region"], optional=False, document="""Center of gravity as a mesh""")}) + description = """Compute the center of gravity of a set of elements""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=True, + document="""Mesh scoping, if not set, all the elements of + the mesh are considered.""", + ), + 2: PinSpecification( + name="field", + type_names=["field"], + optional=True, + document="""Elemental or nodal ponderation used in + computation.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document="""Center of gravity as a mesh""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "topology::center_of_gravity") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config( + name="topology::center_of_gravity", server=server + ) @property def inputs(self): @@ -77,167 +134,157 @@ def inputs(self): Returns -------- - inputs : InputsCenterOfGravity + inputs : InputsCenterOfGravity """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCenterOfGravity + outputs : OutputsCenterOfGravity """ return super().outputs -#internal name: topology::center_of_gravity -#scripting name: center_of_gravity class InputsCenterOfGravity(_Inputs): - """Intermediate class used to connect user inputs to center_of_gravity operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.center_of_gravity() - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + """Intermediate class used to connect user inputs to + center_of_gravity operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.center_of_gravity() + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) """ + def __init__(self, op: Operator): super().__init__(center_of_gravity._spec().inputs, op) - self._mesh = Input(center_of_gravity._spec().input_pin(0), 0, op, -1) + self._mesh = Input(center_of_gravity._spec().input_pin(0), 0, op, -1) self._inputs.append(self._mesh) - self._mesh_scoping = Input(center_of_gravity._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(center_of_gravity._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._field = Input(center_of_gravity._spec().input_pin(2), 2, op, -1) + self._field = Input(center_of_gravity._spec().input_pin(2), 2, op, -1) self._inputs.append(self._field) @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.center_of_gravity() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: Mesh scoping, if not set, all the elements of the mesh are considered. + Mesh scoping, if not set, all the elements of + the mesh are considered. 
Parameters ---------- - my_mesh_scoping : Scoping, + my_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.center_of_gravity() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. - - pindoc: Elemental or nodal ponderation used in computation. + Elemental or nodal ponderation used in + computation. Parameters ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.center_of_gravity() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field + class OutputsCenterOfGravity(_Outputs): - """Intermediate class used to get outputs from center_of_gravity operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.center_of_gravity() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - >>> result_mesh = op.outputs.mesh() + """Intermediate class used to get outputs from + center_of_gravity operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.center_of_gravity() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() + >>> result_mesh = op.outputs.mesh() """ + def __init__(self, op: Operator): super().__init__(center_of_gravity._spec().outputs, op) - self._field = Output(center_of_gravity._spec().output_pin(0), 0, op) + self._field = Output(center_of_gravity._spec().output_pin(0), 0, op) self._outputs.append(self._field) - self._mesh = Output(center_of_gravity._spec().output_pin(1), 1, op) + self._mesh = Output(center_of_gravity._spec().output_pin(1), 1, op) self._outputs.append(self._mesh) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.center_of_gravity() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field @property def mesh(self): """Allows to get mesh output of the operator - - - pindoc: Center of gravity as a mesh - Returns ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.center_of_gravity() >>> # Connect inputs : op.inputs. ... - >>> result_mesh = op.outputs.mesh() - """ + >>> result_mesh = op.outputs.mesh() + """ # noqa: E501 return self._mesh - diff --git a/ansys/dpf/core/operators/geo/element_nodal_contribution.py b/ansys/dpf/core/operators/geo/element_nodal_contribution.py index 650ddf387c7..18b9118416f 100644 --- a/ansys/dpf/core/operators/geo/element_nodal_contribution.py +++ b/ansys/dpf/core/operators/geo/element_nodal_contribution.py @@ -1,72 +1,131 @@ """ element_nodal_contribution -========================== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "geo" category -""" class element_nodal_contribution(Operator): - """Compute the fraction of volume attributed to each node of each element. - - available inputs: - - mesh (MeshedRegion) - - scoping (Scoping) (optional) - - volume_fraction (bool) (optional) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.geo.element_nodal_contribution() - - >>> # Make input connections - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - >>> my_volume_fraction = bool() - >>> op.inputs.volume_fraction.connect(my_volume_fraction) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.geo.element_nodal_contribution(mesh=my_mesh,scoping=my_scoping,volume_fraction=my_volume_fraction) - - >>> # Get output data - >>> result_field = op.outputs.field()""" - def __init__(self, mesh=None, scoping=None, volume_fraction=None, config=None, server=None): - super().__init__(name="element::nodal_contribution", config = config, server = server) + """Compute the fraction of volume attributed to each node of each + element. + + Parameters + ---------- + mesh : MeshedRegion + scoping : Scoping, optional + Integrate the input field over a specific + scoping. + volume_fraction : bool, optional + If true, returns influence volume, if false, + return influence volume fraction + (i.e. integrated value of shape + function for each node). 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.geo.element_nodal_contribution() + + >>> # Make input connections + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_volume_fraction = bool() + >>> op.inputs.volume_fraction.connect(my_volume_fraction) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.geo.element_nodal_contribution( + ... mesh=my_mesh, + ... scoping=my_scoping, + ... volume_fraction=my_volume_fraction, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__( + self, mesh=None, scoping=None, volume_fraction=None, config=None, server=None + ): + super().__init__( + name="element::nodal_contribution", config=config, server=server + ) self._inputs = InputsElementNodalContribution(self) self._outputs = OutputsElementNodalContribution(self) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) - if scoping !=None: + if scoping is not None: self.inputs.scoping.connect(scoping) - if volume_fraction !=None: + if volume_fraction is not None: self.inputs.volume_fraction.connect(volume_fraction) @staticmethod def _spec(): - spec = Specification(description="""Compute the fraction of volume attributed to each node of each element.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=False, document=""""""), - 1 : PinSpecification(name = "scoping", type_names=["scoping"], optional=True, document="""Integrate the input field over a specific scoping."""), - 2 : PinSpecification(name = "volume_fraction", type_names=["bool"], optional=True, document="""if true, returns influence volume, if false, return influence volume fraction (i.e. 
integrated value of shape function for each node).""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Compute the fraction of volume attributed to each node of each + element.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""Integrate the input field over a specific + scoping.""", + ), + 2: PinSpecification( + name="volume_fraction", + type_names=["bool"], + optional=True, + document="""If true, returns influence volume, if false, + return influence volume fraction + (i.e. integrated value of shape + function for each node).""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "element::nodal_contribution") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config( + name="element::nodal_contribution", server=server + ) @property def inputs(self): @@ -74,143 +133,143 @@ def inputs(self): Returns -------- - inputs : InputsElementNodalContribution + inputs : InputsElementNodalContribution """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElementNodalContribution + outputs : OutputsElementNodalContribution """ return super().outputs -#internal name: element::nodal_contribution -#scripting name: element_nodal_contribution class InputsElementNodalContribution(_Inputs): - """Intermediate class used to connect user inputs to element_nodal_contribution operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.element_nodal_contribution() - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - >>> my_volume_fraction = bool() - >>> op.inputs.volume_fraction.connect(my_volume_fraction) + """Intermediate class used to connect user inputs to + element_nodal_contribution operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.element_nodal_contribution() + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_volume_fraction = bool() + >>> op.inputs.volume_fraction.connect(my_volume_fraction) """ + def __init__(self, op: Operator): super().__init__(element_nodal_contribution._spec().inputs, op) - self._mesh = Input(element_nodal_contribution._spec().input_pin(0), 0, op, -1) + self._mesh = Input(element_nodal_contribution._spec().input_pin(0), 0, op, -1) self._inputs.append(self._mesh) - self._scoping = Input(element_nodal_contribution._spec().input_pin(1), 1, op, -1) + self._scoping = Input( + element_nodal_contribution._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._volume_fraction = Input(element_nodal_contribution._spec().input_pin(2), 2, op, -1) + self._volume_fraction = Input( + element_nodal_contribution._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._volume_fraction) @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.element_nodal_contribution() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def scoping(self): - """Allows to connect scoping input to the operator + """Allows to connect scoping input to the operator. - - pindoc: Integrate the input field over a specific scoping. + Integrate the input field over a specific + scoping. 
Parameters ---------- - my_scoping : Scoping, + my_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.element_nodal_contribution() >>> op.inputs.scoping.connect(my_scoping) - >>> #or + >>> # or >>> op.inputs.scoping(my_scoping) - """ return self._scoping @property def volume_fraction(self): - """Allows to connect volume_fraction input to the operator + """Allows to connect volume_fraction input to the operator. - - pindoc: if true, returns influence volume, if false, return influence volume fraction (i.e. integrated value of shape function for each node). + If true, returns influence volume, if false, + return influence volume fraction + (i.e. integrated value of shape + function for each node). Parameters ---------- - my_volume_fraction : bool, + my_volume_fraction : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.element_nodal_contribution() >>> op.inputs.volume_fraction.connect(my_volume_fraction) - >>> #or + >>> # or >>> op.inputs.volume_fraction(my_volume_fraction) - """ return self._volume_fraction + class OutputsElementNodalContribution(_Outputs): - """Intermediate class used to get outputs from element_nodal_contribution operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.element_nodal_contribution() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + element_nodal_contribution operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.element_nodal_contribution() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(element_nodal_contribution._spec().outputs, op) - self._field = Output(element_nodal_contribution._spec().output_pin(0), 0, op) + self._field = Output(element_nodal_contribution._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.element_nodal_contribution() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/geo/elements_facets_surfaces_over_time.py b/ansys/dpf/core/operators/geo/elements_facets_surfaces_over_time.py index c269a2637cf..e0425872ab8 100644 --- a/ansys/dpf/core/operators/geo/elements_facets_surfaces_over_time.py +++ b/ansys/dpf/core/operators/geo/elements_facets_surfaces_over_time.py @@ -1,75 +1,132 @@ """ elements_facets_surfaces_over_time -================================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "geo" category -""" class elements_facets_surfaces_over_time(Operator): - """Calculation of the surface of each element's facet over time of a mesh for each specified time step. Moreover, it gives as output a new mesh made with only surface elements. 
- - available inputs: - - scoping (Scoping) (optional) - - displacement (FieldsContainer) (optional) - - mesh (MeshedRegion) (optional) - - available outputs: - - fields_container (FieldsContainer) - - mesh (MeshedRegion) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.geo.elements_facets_surfaces_over_time() - - >>> # Make input connections - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - >>> my_displacement = dpf.FieldsContainer() - >>> op.inputs.displacement.connect(my_displacement) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.geo.elements_facets_surfaces_over_time(scoping=my_scoping,displacement=my_displacement,mesh=my_mesh) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container() - >>> result_mesh = op.outputs.mesh()""" - def __init__(self, scoping=None, displacement=None, mesh=None, config=None, server=None): - super().__init__(name="surfaces_provider", config = config, server = server) + """Calculation of the surface of each element's facet over time of a mesh + for each specified time step. Moreover, it gives as output a new + mesh made with only surface elements. + + Parameters + ---------- + scoping : Scoping, optional + displacement : FieldsContainer, optional + Displacement field's container. + mesh : MeshedRegion, optional + Mesh must be defined if the displacement + field's container does not contain + it, or if there is no displacement. 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.geo.elements_facets_surfaces_over_time() + + >>> # Make input connections + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.geo.elements_facets_surfaces_over_time( + ... scoping=my_scoping, + ... displacement=my_displacement, + ... mesh=my_mesh, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + >>> result_mesh = op.outputs.mesh() + """ + + def __init__( + self, scoping=None, displacement=None, mesh=None, config=None, server=None + ): + super().__init__(name="surfaces_provider", config=config, server=server) self._inputs = InputsElementsFacetsSurfacesOverTime(self) self._outputs = OutputsElementsFacetsSurfacesOverTime(self) - if scoping !=None: + if scoping is not None: self.inputs.scoping.connect(scoping) - if displacement !=None: + if displacement is not None: self.inputs.displacement.connect(displacement) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Calculation of the surface of each element's facet over time of a mesh for each specified time step. 
Moreover, it gives as output a new mesh made with only surface elements.""", - map_input_pin_spec={ - 1 : PinSpecification(name = "scoping", type_names=["scoping"], optional=True, document=""""""), - 2 : PinSpecification(name = "displacement", type_names=["fields_container"], optional=True, document="""Displacement field's container."""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""Mesh must be defined if the displacement field's container does not contain it, or if there is no displacement.""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""Surfaces field."""), - 1 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=False, document="""Mesh made of surface elements only.""")}) + description = """Calculation of the surface of each element's facet over time of a mesh + for each specified time step. Moreover, it gives as output + a new mesh made with only surface elements.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="displacement", + type_names=["fields_container"], + optional=True, + document="""Displacement field's container.""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""Mesh must be defined if the displacement + field's container does not contain + it, or if there is no displacement.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Surfaces field.""", + ), + 1: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document="""Mesh made of surface elements only.""", + ), + }, + ) return spec - @staticmethod 
- def default_config(): - return Operator.default_config(name = "surfaces_provider") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="surfaces_provider", server=server) @property def inputs(self): @@ -77,169 +134,167 @@ def inputs(self): Returns -------- - inputs : InputsElementsFacetsSurfacesOverTime + inputs : InputsElementsFacetsSurfacesOverTime """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElementsFacetsSurfacesOverTime + outputs : OutputsElementsFacetsSurfacesOverTime """ return super().outputs -#internal name: surfaces_provider -#scripting name: elements_facets_surfaces_over_time class InputsElementsFacetsSurfacesOverTime(_Inputs): - """Intermediate class used to connect user inputs to elements_facets_surfaces_over_time operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.elements_facets_surfaces_over_time() - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - >>> my_displacement = dpf.FieldsContainer() - >>> op.inputs.displacement.connect(my_displacement) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + elements_facets_surfaces_over_time operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.elements_facets_surfaces_over_time() + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(elements_facets_surfaces_over_time._spec().inputs, op) - self._scoping = Input(elements_facets_surfaces_over_time._spec().input_pin(1), 1, op, -1) + self._scoping = Input( + elements_facets_surfaces_over_time._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._displacement = Input(elements_facets_surfaces_over_time._spec().input_pin(2), 2, op, -1) + self._displacement = Input( + elements_facets_surfaces_over_time._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._displacement) - self._mesh = Input(elements_facets_surfaces_over_time._spec().input_pin(7), 7, op, -1) + self._mesh = Input( + elements_facets_surfaces_over_time._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property def scoping(self): - """Allows to connect scoping input to the operator + """Allows to connect scoping input to the operator. Parameters ---------- - my_scoping : Scoping, + my_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.elements_facets_surfaces_over_time() >>> op.inputs.scoping.connect(my_scoping) - >>> #or + >>> # or >>> op.inputs.scoping(my_scoping) - """ return self._scoping @property def displacement(self): - """Allows to connect displacement input to the operator + """Allows to connect displacement input to the operator. - - pindoc: Displacement field's container. + Displacement field's container. 
Parameters ---------- - my_displacement : FieldsContainer, + my_displacement : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.elements_facets_surfaces_over_time() >>> op.inputs.displacement.connect(my_displacement) - >>> #or + >>> # or >>> op.inputs.displacement(my_displacement) - """ return self._displacement @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: Mesh must be defined if the displacement field's container does not contain it, or if there is no displacement. + Mesh must be defined if the displacement + field's container does not contain + it, or if there is no displacement. Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.elements_facets_surfaces_over_time() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsElementsFacetsSurfacesOverTime(_Outputs): - """Intermediate class used to get outputs from elements_facets_surfaces_over_time operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.elements_facets_surfaces_over_time() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - >>> result_mesh = op.outputs.mesh() + """Intermediate class used to get outputs from + elements_facets_surfaces_over_time operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.elements_facets_surfaces_over_time() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() + >>> result_mesh = op.outputs.mesh() """ + def __init__(self, op: Operator): super().__init__(elements_facets_surfaces_over_time._spec().outputs, op) - self._fields_container = Output(elements_facets_surfaces_over_time._spec().output_pin(0), 0, op) + self._fields_container = Output( + elements_facets_surfaces_over_time._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) - self._mesh = Output(elements_facets_surfaces_over_time._spec().output_pin(1), 1, op) + self._mesh = Output( + elements_facets_surfaces_over_time._spec().output_pin(1), 1, op + ) self._outputs.append(self._mesh) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: Surfaces field. - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.elements_facets_surfaces_over_time() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container @property def mesh(self): """Allows to get mesh output of the operator - - - pindoc: Mesh made of surface elements only. - Returns ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.elements_facets_surfaces_over_time() >>> # Connect inputs : op.inputs. ... 
- >>> result_mesh = op.outputs.mesh() - """ + >>> result_mesh = op.outputs.mesh() + """ # noqa: E501 return self._mesh - diff --git a/ansys/dpf/core/operators/geo/elements_volume.py b/ansys/dpf/core/operators/geo/elements_volume.py index 67f3a372bfd..82066339b03 100644 --- a/ansys/dpf/core/operators/geo/elements_volume.py +++ b/ansys/dpf/core/operators/geo/elements_volume.py @@ -1,60 +1,91 @@ """ elements_volume =============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "geo" category -""" class elements_volume(Operator): - """Compute the volume of each element of a mesh, using default shape functions. + """Compute the volume of each element of a mesh, using default shape + functions. + + Parameters + ---------- + mesh : MeshedRegion - available inputs: - - mesh (MeshedRegion) - available outputs: - - field (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.geo.elements_volume() - >>> # Instantiate operator - >>> op = dpf.operators.geo.elements_volume() + >>> # Make input connections + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) - >>> # Make input connections - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.geo.elements_volume( + ... mesh=my_mesh, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.geo.elements_volume(mesh=my_mesh) + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, mesh=None, config=None, server=None): - super().__init__(name="element::volume", config = config, server = server) + super().__init__(name="element::volume", config=config, server=server) self._inputs = InputsElementsVolume(self) self._outputs = OutputsElementsVolume(self) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Compute the volume of each element of a mesh, using default shape functions.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Compute the volume of each element of a mesh, using default shape + functions.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "element::volume") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. 
When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="element::volume", server=server) @property def inputs(self): @@ -62,91 +93,87 @@ def inputs(self): Returns -------- - inputs : InputsElementsVolume + inputs : InputsElementsVolume """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElementsVolume + outputs : OutputsElementsVolume """ return super().outputs -#internal name: element::volume -#scripting name: elements_volume class InputsElementsVolume(_Inputs): - """Intermediate class used to connect user inputs to elements_volume operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.elements_volume() - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + elements_volume operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.elements_volume() + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(elements_volume._spec().inputs, op) - self._mesh = Input(elements_volume._spec().input_pin(0), 0, op, -1) + self._mesh = Input(elements_volume._spec().input_pin(0), 0, op, -1) self._inputs.append(self._mesh) @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.elements_volume() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsElementsVolume(_Outputs): - """Intermediate class used to get outputs from elements_volume operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.elements_volume() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + elements_volume operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.elements_volume() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(elements_volume._spec().outputs, op) - self._field = Output(elements_volume._spec().output_pin(0), 0, op) + self._field = Output(elements_volume._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.elements_volume() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/geo/elements_volumes_over_time.py b/ansys/dpf/core/operators/geo/elements_volumes_over_time.py index 83a5a3ae50e..1c8fc2ff3a2 100644 --- a/ansys/dpf/core/operators/geo/elements_volumes_over_time.py +++ b/ansys/dpf/core/operators/geo/elements_volumes_over_time.py @@ -1,72 +1,127 @@ """ elements_volumes_over_time -========================== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "geo" category -""" class elements_volumes_over_time(Operator): - """Calculation of the volume of each element over time of a mesh for each specified time step. - - available inputs: - - scoping (Scoping) (optional) - - displacement (FieldsContainer) (optional) - - mesh (MeshedRegion) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.geo.elements_volumes_over_time() - - >>> # Make input connections - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - >>> my_displacement = dpf.FieldsContainer() - >>> op.inputs.displacement.connect(my_displacement) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.geo.elements_volumes_over_time(scoping=my_scoping,displacement=my_displacement,mesh=my_mesh) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, scoping=None, displacement=None, mesh=None, config=None, server=None): - super().__init__(name="volumes_provider", config = config, server = server) + """Calculation of the volume of each element over time of a mesh for each + specified time step. + + Parameters + ---------- + scoping : Scoping, optional + displacement : FieldsContainer, optional + Displacement field's container. must contain + the mesh if mesh not specified in + input. 
+ mesh : MeshedRegion, optional + Mesh must be defined if the displacement + field's container does not contain + it, or if there is no displacement. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.geo.elements_volumes_over_time() + + >>> # Make input connections + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.geo.elements_volumes_over_time( + ... scoping=my_scoping, + ... displacement=my_displacement, + ... mesh=my_mesh, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, scoping=None, displacement=None, mesh=None, config=None, server=None + ): + super().__init__(name="volumes_provider", config=config, server=server) self._inputs = InputsElementsVolumesOverTime(self) self._outputs = OutputsElementsVolumesOverTime(self) - if scoping !=None: + if scoping is not None: self.inputs.scoping.connect(scoping) - if displacement !=None: + if displacement is not None: self.inputs.displacement.connect(displacement) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Calculation of the volume of each element over time of a mesh for each specified time step.""", - map_input_pin_spec={ - 1 : PinSpecification(name = "scoping", type_names=["scoping"], optional=True, document=""""""), - 2 : PinSpecification(name = "displacement", type_names=["fields_container"], optional=True, document="""Displacement field's container. 
Must contain the mesh if mesh not specified in input."""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""Mesh must be defined if the displacement field's container does not contain it, or if there is no displacement.""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Calculation of the volume of each element over time of a mesh for each + specified time step.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="displacement", + type_names=["fields_container"], + optional=True, + document="""Displacement field's container. must contain + the mesh if mesh not specified in + input.""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""Mesh must be defined if the displacement + field's container does not contain + it, or if there is no displacement.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "volumes_provider") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="volumes_provider", server=server) @property def inputs(self): @@ -74,143 +129,145 @@ def inputs(self): Returns -------- - inputs : InputsElementsVolumesOverTime + inputs : InputsElementsVolumesOverTime """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElementsVolumesOverTime + outputs : OutputsElementsVolumesOverTime """ return super().outputs -#internal name: volumes_provider -#scripting name: elements_volumes_over_time class InputsElementsVolumesOverTime(_Inputs): - """Intermediate class used to connect user inputs to elements_volumes_over_time operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.elements_volumes_over_time() - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - >>> my_displacement = dpf.FieldsContainer() - >>> op.inputs.displacement.connect(my_displacement) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + elements_volumes_over_time operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.elements_volumes_over_time() + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(elements_volumes_over_time._spec().inputs, op) - self._scoping = Input(elements_volumes_over_time._spec().input_pin(1), 1, op, -1) + self._scoping = Input( + elements_volumes_over_time._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._scoping) - self._displacement = Input(elements_volumes_over_time._spec().input_pin(2), 2, op, -1) + self._displacement = Input( + elements_volumes_over_time._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._displacement) - self._mesh = Input(elements_volumes_over_time._spec().input_pin(7), 7, op, -1) + self._mesh = Input(elements_volumes_over_time._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) @property def scoping(self): - """Allows to connect scoping input to the operator + """Allows to connect scoping input to the operator. Parameters ---------- - my_scoping : Scoping, + my_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.elements_volumes_over_time() >>> op.inputs.scoping.connect(my_scoping) - >>> #or + >>> # or >>> op.inputs.scoping(my_scoping) - """ return self._scoping @property def displacement(self): - """Allows to connect displacement input to the operator + """Allows to connect displacement input to the operator. - - pindoc: Displacement field's container. Must contain the mesh if mesh not specified in input. + Displacement field's container. must contain + the mesh if mesh not specified in + input. 
Parameters ---------- - my_displacement : FieldsContainer, + my_displacement : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.elements_volumes_over_time() >>> op.inputs.displacement.connect(my_displacement) - >>> #or + >>> # or >>> op.inputs.displacement(my_displacement) - """ return self._displacement @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: Mesh must be defined if the displacement field's container does not contain it, or if there is no displacement. + Mesh must be defined if the displacement + field's container does not contain + it, or if there is no displacement. Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.elements_volumes_over_time() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsElementsVolumesOverTime(_Outputs): - """Intermediate class used to get outputs from elements_volumes_over_time operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.elements_volumes_over_time() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + elements_volumes_over_time operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.elements_volumes_over_time() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(elements_volumes_over_time._spec().outputs, op) - self._fields_container = Output(elements_volumes_over_time._spec().output_pin(0), 0, op) + self._fields_container = Output( + elements_volumes_over_time._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.elements_volumes_over_time() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/geo/gauss_to_node.py b/ansys/dpf/core/operators/geo/gauss_to_node.py index ba19a6cbda1..89da8f9de86 100644 --- a/ansys/dpf/core/operators/geo/gauss_to_node.py +++ b/ansys/dpf/core/operators/geo/gauss_to_node.py @@ -1,72 +1,124 @@ """ gauss_to_node -============= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "geo" category -""" class gauss_to_node(Operator): - """Extrapolating results available at Gauss or quadrature points to nodal points for one field. 
The available elements are : Linear quadrangle , parabolique quadrangle,Linear Hexagonal, quadratic hexagonal , linear tetrahedral, quadratic tetrahedral - - available inputs: - - field (Field) - - scoping (Scoping) (optional) - - mesh (MeshedRegion) (optional) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.geo.gauss_to_node() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.geo.gauss_to_node(field=my_field,scoping=my_scoping,mesh=my_mesh) + """Extrapolating results available at Gauss or quadrature points to nodal + points for one field. The available elements are : Linear + quadrangle , parabolique quadrangle,Linear Hexagonal, quadratic + hexagonal , linear tetrahedral, quadratic tetrahedral + + Parameters + ---------- + field : Field + scoping : Scoping, optional + Scoping to integrate on, if not provided, the + one from input field is provided. + mesh : MeshedRegion, optional + Mesh to integrate on. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.geo.gauss_to_node() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.geo.gauss_to_node( + ... field=my_field, + ... scoping=my_scoping, + ... mesh=my_mesh, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, field=None, scoping=None, mesh=None, config=None, server=None): - super().__init__(name="gauss_to_node", config = config, server = server) + super().__init__(name="gauss_to_node", config=config, server=server) self._inputs = InputsGaussToNode(self) self._outputs = OutputsGaussToNode(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if scoping !=None: + if scoping is not None: self.inputs.scoping.connect(scoping) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Extrapolating results available at Gauss or quadrature points to nodal points for one field. The available elements are : Linear quadrangle , parabolique quadrangle,Linear Hexagonal, quadratic hexagonal , linear tetrahedral, quadratic tetrahedral """, - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document=""""""), - 1 : PinSpecification(name = "scoping", type_names=["scoping"], optional=True, document="""Scoping to integrate on, if not provided, the one from input field is provided."""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""Mesh to integrate on.""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Extrapolating results available at Gauss or quadrature points to nodal + points for one field. 
The available elements are : Linear + quadrangle , parabolique quadrangle,Linear Hexagonal, + quadratic hexagonal , linear tetrahedral, quadratic + tetrahedral""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""Scoping to integrate on, if not provided, the + one from input field is provided.""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""Mesh to integrate on.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "gauss_to_node") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="gauss_to_node", server=server) @property def inputs(self): @@ -74,143 +126,136 @@ def inputs(self): Returns -------- - inputs : InputsGaussToNode + inputs : InputsGaussToNode """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsGaussToNode + outputs : OutputsGaussToNode """ return super().outputs -#internal name: gauss_to_node -#scripting name: gauss_to_node class InputsGaussToNode(_Inputs): - """Intermediate class used to connect user inputs to gauss_to_node operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.gauss_to_node() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + gauss_to_node operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.gauss_to_node() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(gauss_to_node._spec().inputs, op) - self._field = Input(gauss_to_node._spec().input_pin(0), 0, op, -1) + self._field = Input(gauss_to_node._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._scoping = Input(gauss_to_node._spec().input_pin(1), 1, op, -1) + self._scoping = Input(gauss_to_node._spec().input_pin(1), 1, op, -1) self._inputs.append(self._scoping) - self._mesh = Input(gauss_to_node._spec().input_pin(7), 7, op, -1) + self._mesh = Input(gauss_to_node._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. Parameters ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.gauss_to_node() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def scoping(self): - """Allows to connect scoping input to the operator + """Allows to connect scoping input to the operator. - - pindoc: Scoping to integrate on, if not provided, the one from input field is provided. + Scoping to integrate on, if not provided, the + one from input field is provided. 
Parameters ---------- - my_scoping : Scoping, + my_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.gauss_to_node() >>> op.inputs.scoping.connect(my_scoping) - >>> #or + >>> # or >>> op.inputs.scoping(my_scoping) - """ return self._scoping @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: Mesh to integrate on. + Mesh to integrate on. Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.gauss_to_node() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsGaussToNode(_Outputs): - """Intermediate class used to get outputs from gauss_to_node operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.gauss_to_node() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + gauss_to_node operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.gauss_to_node() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(gauss_to_node._spec().outputs, op) - self._field = Output(gauss_to_node._spec().output_pin(0), 0, op) + self._field = Output(gauss_to_node._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.gauss_to_node() >>> # Connect inputs : op.inputs. ... 
- >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/geo/integrate_over_elements.py b/ansys/dpf/core/operators/geo/integrate_over_elements.py index af1c2edbf55..e88c77cfcdb 100644 --- a/ansys/dpf/core/operators/geo/integrate_over_elements.py +++ b/ansys/dpf/core/operators/geo/integrate_over_elements.py @@ -1,72 +1,119 @@ """ integrate_over_elements -======================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "geo" category -""" class integrate_over_elements(Operator): """Integration of an input field over mesh. - available inputs: - - field (Field) - - scoping (Scoping) (optional) - - mesh (MeshedRegion) (optional) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.geo.integrate_over_elements() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.geo.integrate_over_elements(field=my_field,scoping=my_scoping,mesh=my_mesh) + Parameters + ---------- + field : Field + scoping : Scoping, optional + Integrate the input field over a specific + scoping. + mesh : MeshedRegion, optional + Mesh to integrate on, if not provided the one + from input field is provided. 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.geo.integrate_over_elements() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.geo.integrate_over_elements( + ... field=my_field, + ... scoping=my_scoping, + ... mesh=my_mesh, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, field=None, scoping=None, mesh=None, config=None, server=None): - super().__init__(name="element::integrate", config = config, server = server) + super().__init__(name="element::integrate", config=config, server=server) self._inputs = InputsIntegrateOverElements(self) self._outputs = OutputsIntegrateOverElements(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if scoping !=None: + if scoping is not None: self.inputs.scoping.connect(scoping) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Integration of an input field over mesh.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document=""""""), - 1 : PinSpecification(name = "scoping", type_names=["scoping"], optional=True, document="""Integrate the input field over a specific scoping."""), - 2 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""Mesh to integrate on, if not provided the one from input field is provided.""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + 
description = """Integration of an input field over mesh.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""Integrate the input field over a specific + scoping.""", + ), + 2: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""Mesh to integrate on, if not provided the one + from input field is provided.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "element::integrate") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="element::integrate", server=server) @property def inputs(self): @@ -74,143 +121,137 @@ def inputs(self): Returns -------- - inputs : InputsIntegrateOverElements + inputs : InputsIntegrateOverElements """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsIntegrateOverElements + outputs : OutputsIntegrateOverElements """ return super().outputs -#internal name: element::integrate -#scripting name: integrate_over_elements class InputsIntegrateOverElements(_Inputs): - """Intermediate class used to connect user inputs to integrate_over_elements operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.integrate_over_elements() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + integrate_over_elements operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.integrate_over_elements() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(integrate_over_elements._spec().inputs, op) - self._field = Input(integrate_over_elements._spec().input_pin(0), 0, op, -1) + self._field = Input(integrate_over_elements._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._scoping = Input(integrate_over_elements._spec().input_pin(1), 1, op, -1) + self._scoping = Input(integrate_over_elements._spec().input_pin(1), 1, op, -1) self._inputs.append(self._scoping) - self._mesh = Input(integrate_over_elements._spec().input_pin(2), 2, op, -1) + self._mesh = Input(integrate_over_elements._spec().input_pin(2), 2, op, -1) self._inputs.append(self._mesh) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. Parameters ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.integrate_over_elements() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def scoping(self): - """Allows to connect scoping input to the operator + """Allows to connect scoping input to the operator. - - pindoc: Integrate the input field over a specific scoping. + Integrate the input field over a specific + scoping. 
Parameters ---------- - my_scoping : Scoping, + my_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.integrate_over_elements() >>> op.inputs.scoping.connect(my_scoping) - >>> #or + >>> # or >>> op.inputs.scoping(my_scoping) - """ return self._scoping @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: Mesh to integrate on, if not provided the one from input field is provided. + Mesh to integrate on, if not provided the one + from input field is provided. Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.integrate_over_elements() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsIntegrateOverElements(_Outputs): - """Intermediate class used to get outputs from integrate_over_elements operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.integrate_over_elements() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + integrate_over_elements operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.integrate_over_elements() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(integrate_over_elements._spec().outputs, op) - self._field = Output(integrate_over_elements._spec().output_pin(0), 0, op) + self._field = Output(integrate_over_elements._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.integrate_over_elements() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/geo/mass.py b/ansys/dpf/core/operators/geo/mass.py index 5df57b19f7c..d59fa3aec33 100644 --- a/ansys/dpf/core/operators/geo/mass.py +++ b/ansys/dpf/core/operators/geo/mass.py @@ -1,72 +1,121 @@ """ mass -==== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "geo" category -""" class mass(Operator): """Compute the mass of a set of elements. 
- available inputs: - - mesh (MeshedRegion) (optional) - - mesh_scoping (Scoping) (optional) - - field (Field) (optional) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.geo.mass() - - >>> # Make input connections - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.geo.mass(mesh=my_mesh,mesh_scoping=my_mesh_scoping,field=my_field) + Parameters + ---------- + mesh : MeshedRegion, optional + mesh_scoping : Scoping, optional + Mesh scoping, if not set, all the elements of + the mesh are considered. + field : Field, optional + Elemental or nodal ponderation used in + computation. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.geo.mass() + + >>> # Make input connections + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.geo.mass( + ... mesh=my_mesh, + ... mesh_scoping=my_mesh_scoping, + ... field=my_field, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" - def __init__(self, mesh=None, mesh_scoping=None, field=None, config=None, server=None): - super().__init__(name="topology::mass", config = config, server = server) + def __init__( + self, mesh=None, mesh_scoping=None, field=None, config=None, server=None + ): + super().__init__(name="topology::mass", config=config, server=server) self._inputs = InputsMass(self) self._outputs = OutputsMass(self) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if field !=None: + if field is not None: self.inputs.field.connect(field) @staticmethod def _spec(): - spec = Specification(description="""Compute the mass of a set of elements.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scoping"], optional=True, document="""Mesh scoping, if not set, all the elements of the mesh are considered."""), - 2 : PinSpecification(name = "field", type_names=["field"], optional=True, document="""Elemental or nodal ponderation used in computation.""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Compute the mass of a set of elements.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=True, + document="""Mesh scoping, if not set, all the elements of + the mesh are considered.""", + ), + 2: PinSpecification( + name="field", + type_names=["field"], + optional=True, + 
document="""Elemental or nodal ponderation used in + computation.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "topology::mass") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="topology::mass", server=server) @property def inputs(self): @@ -74,143 +123,137 @@ def inputs(self): Returns -------- - inputs : InputsMass + inputs : InputsMass """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMass + outputs : OutputsMass """ return super().outputs -#internal name: topology::mass -#scripting name: mass class InputsMass(_Inputs): - """Intermediate class used to connect user inputs to mass operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.mass() - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + """Intermediate class used to connect user inputs to + mass operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.mass() + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) """ + def __init__(self, op: Operator): super().__init__(mass._spec().inputs, op) - self._mesh = Input(mass._spec().input_pin(0), 0, op, -1) + self._mesh = Input(mass._spec().input_pin(0), 0, op, -1) self._inputs.append(self._mesh) - self._mesh_scoping = Input(mass._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(mass._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._field = Input(mass._spec().input_pin(2), 2, op, -1) + self._field = Input(mass._spec().input_pin(2), 2, op, -1) self._inputs.append(self._field) @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.mass() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: Mesh scoping, if not set, all the elements of the mesh are considered. + Mesh scoping, if not set, all the elements of + the mesh are considered. 
Parameters ---------- - my_mesh_scoping : Scoping, + my_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.mass() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. - - pindoc: Elemental or nodal ponderation used in computation. + Elemental or nodal ponderation used in + computation. Parameters ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.mass() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field + class OutputsMass(_Outputs): - """Intermediate class used to get outputs from mass operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.mass() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + mass operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.mass() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(mass._spec().outputs, op) - self._field = Output(mass._spec().output_pin(0), 0, op) + self._field = Output(mass._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.mass() >>> # Connect inputs : op.inputs. ... 
- >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/geo/moment_of_inertia.py b/ansys/dpf/core/operators/geo/moment_of_inertia.py index 80f998fbade..f8e830413d4 100644 --- a/ansys/dpf/core/operators/geo/moment_of_inertia.py +++ b/ansys/dpf/core/operators/geo/moment_of_inertia.py @@ -1,78 +1,146 @@ """ moment_of_inertia -================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "geo" category -""" class moment_of_inertia(Operator): """Compute the inertia tensor of a set of elements. 
- available inputs: - - mesh (MeshedRegion) (optional) - - mesh_scoping (Scoping) (optional) - - field (Field) (optional) - - boolean (bool) (optional) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.geo.moment_of_inertia() - - >>> # Make input connections - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_boolean = bool() - >>> op.inputs.boolean.connect(my_boolean) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.geo.moment_of_inertia(mesh=my_mesh,mesh_scoping=my_mesh_scoping,field=my_field,boolean=my_boolean) - - >>> # Get output data - >>> result_field = op.outputs.field()""" - def __init__(self, mesh=None, mesh_scoping=None, field=None, boolean=None, config=None, server=None): - super().__init__(name="topology::moment_of_inertia", config = config, server = server) + Parameters + ---------- + mesh : MeshedRegion, optional + mesh_scoping : Scoping, optional + Mesh scoping, if not set, all the elements of + the mesh are considered. + field : Field, optional + Elemental or nodal ponderation used in + computation. + boolean : bool, optional + Default true, compute inertia tensor at + center of gravity. 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.geo.moment_of_inertia() + + >>> # Make input connections + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_boolean = bool() + >>> op.inputs.boolean.connect(my_boolean) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.geo.moment_of_inertia( + ... mesh=my_mesh, + ... mesh_scoping=my_mesh_scoping, + ... field=my_field, + ... boolean=my_boolean, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__( + self, + mesh=None, + mesh_scoping=None, + field=None, + boolean=None, + config=None, + server=None, + ): + super().__init__( + name="topology::moment_of_inertia", config=config, server=server + ) self._inputs = InputsMomentOfInertia(self) self._outputs = OutputsMomentOfInertia(self) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if boolean !=None: + if boolean is not None: self.inputs.boolean.connect(boolean) @staticmethod def _spec(): - spec = Specification(description="""Compute the inertia tensor of a set of elements.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scoping"], optional=True, document="""Mesh scoping, if not set, all the elements of the mesh are considered."""), - 2 : PinSpecification(name = "field", type_names=["field"], optional=True, document="""Elemental or nodal ponderation used in computation."""), - 3 : 
PinSpecification(name = "boolean", type_names=["bool"], optional=True, document="""default true, compute inertia tensor at center of gravity.""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Compute the inertia tensor of a set of elements.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=True, + document="""Mesh scoping, if not set, all the elements of + the mesh are considered.""", + ), + 2: PinSpecification( + name="field", + type_names=["field"], + optional=True, + document="""Elemental or nodal ponderation used in + computation.""", + ), + 3: PinSpecification( + name="boolean", + type_names=["bool"], + optional=True, + document="""Default true, compute inertia tensor at + center of gravity.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "topology::moment_of_inertia") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config( + name="topology::moment_of_inertia", server=server + ) @property def inputs(self): @@ -80,169 +148,162 @@ def inputs(self): Returns -------- - inputs : InputsMomentOfInertia + inputs : InputsMomentOfInertia """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMomentOfInertia + outputs : OutputsMomentOfInertia """ return super().outputs -#internal name: topology::moment_of_inertia -#scripting name: moment_of_inertia class InputsMomentOfInertia(_Inputs): - """Intermediate class used to connect user inputs to moment_of_inertia operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.moment_of_inertia() - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_boolean = bool() - >>> op.inputs.boolean.connect(my_boolean) + """Intermediate class used to connect user inputs to + moment_of_inertia operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.moment_of_inertia() + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_boolean = bool() + >>> op.inputs.boolean.connect(my_boolean) """ + def __init__(self, op: Operator): super().__init__(moment_of_inertia._spec().inputs, op) - self._mesh = Input(moment_of_inertia._spec().input_pin(0), 0, op, -1) + self._mesh = Input(moment_of_inertia._spec().input_pin(0), 0, op, -1) self._inputs.append(self._mesh) - self._mesh_scoping = Input(moment_of_inertia._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(moment_of_inertia._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._field = Input(moment_of_inertia._spec().input_pin(2), 2, op, -1) + self._field = Input(moment_of_inertia._spec().input_pin(2), 2, op, -1) self._inputs.append(self._field) - self._boolean = Input(moment_of_inertia._spec().input_pin(3), 3, op, -1) + self._boolean = Input(moment_of_inertia._spec().input_pin(3), 3, op, -1) self._inputs.append(self._boolean) @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.moment_of_inertia() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: Mesh scoping, if not set, all the elements of the mesh are considered. + Mesh scoping, if not set, all the elements of + the mesh are considered. 
Parameters ---------- - my_mesh_scoping : Scoping, + my_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.moment_of_inertia() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. - - pindoc: Elemental or nodal ponderation used in computation. + Elemental or nodal ponderation used in + computation. Parameters ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.moment_of_inertia() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def boolean(self): - """Allows to connect boolean input to the operator + """Allows to connect boolean input to the operator. - - pindoc: default true, compute inertia tensor at center of gravity. + Default true, compute inertia tensor at + center of gravity. Parameters ---------- - my_boolean : bool, + my_boolean : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.moment_of_inertia() >>> op.inputs.boolean.connect(my_boolean) - >>> #or + >>> # or >>> op.inputs.boolean(my_boolean) - """ return self._boolean + class OutputsMomentOfInertia(_Outputs): - """Intermediate class used to get outputs from moment_of_inertia operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.moment_of_inertia() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + moment_of_inertia operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.moment_of_inertia() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(moment_of_inertia._spec().outputs, op) - self._field = Output(moment_of_inertia._spec().output_pin(0), 0, op) + self._field = Output(moment_of_inertia._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.moment_of_inertia() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/geo/normals.py b/ansys/dpf/core/operators/geo/normals.py index 4a195fcd115..13f75bf6661 100644 --- a/ansys/dpf/core/operators/geo/normals.py +++ b/ansys/dpf/core/operators/geo/normals.py @@ -1,72 +1,119 @@ """ normals -======= +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from meshOperatorsCore plugin, from "geo" category -""" class normals(Operator): - """compute the normals at the given nodes or element scoping based on the given mesh (first version, the element normal is only handled on the shell elements) - - available inputs: - - mesh (MeshedRegion) (optional) - - mesh_scoping (Scoping) (optional) - - field (Field) (optional) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.geo.normals() - - >>> # Make input connections - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.geo.normals(mesh=my_mesh,mesh_scoping=my_mesh_scoping,field=my_field) - - >>> # Get output data - >>> result_field = op.outputs.field()""" - def __init__(self, mesh=None, mesh_scoping=None, field=None, config=None, server=None): - super().__init__(name="normals_provider", config = config, server = server) + """compute the normals at the given nodes or element scoping based on the + given mesh (first version, the element normal is only handled on + the shell elements) + + Parameters + ---------- + mesh : MeshedRegion, optional + mesh_scoping : Scoping, optional + field : Field, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.geo.normals() + + >>> 
# Make input connections + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.geo.normals( + ... mesh=my_mesh, + ... mesh_scoping=my_mesh_scoping, + ... field=my_field, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__( + self, mesh=None, mesh_scoping=None, field=None, config=None, server=None + ): + super().__init__(name="normals_provider", config=config, server=server) self._inputs = InputsNormals(self) self._outputs = OutputsNormals(self) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if field !=None: + if field is not None: self.inputs.field.connect(field) @staticmethod def _spec(): - spec = Specification(description="""compute the normals at the given nodes or element scoping based on the given mesh (first version, the element normal is only handled on the shell elements)""", - map_input_pin_spec={ - 0 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scoping"], optional=True, document=""""""), - 3 : PinSpecification(name = "field", type_names=["field"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """compute the normals at the given nodes or element scoping based on the + given mesh (first version, the element normal is only + handled on the shell elements)""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="mesh", + 
type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=True, + document="""""", + ), + 3: PinSpecification( + name="field", + type_names=["field"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "normals_provider") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="normals_provider", server=server) @property def inputs(self): @@ -74,139 +121,131 @@ def inputs(self): Returns -------- - inputs : InputsNormals + inputs : InputsNormals """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNormals + outputs : OutputsNormals """ return super().outputs -#internal name: normals_provider -#scripting name: normals class InputsNormals(_Inputs): - """Intermediate class used to connect user inputs to normals operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.normals() - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + """Intermediate class used to connect user inputs to + normals operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.normals() + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) """ + def __init__(self, op: Operator): super().__init__(normals._spec().inputs, op) - self._mesh = Input(normals._spec().input_pin(0), 0, op, -1) + self._mesh = Input(normals._spec().input_pin(0), 0, op, -1) self._inputs.append(self._mesh) - self._mesh_scoping = Input(normals._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(normals._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._field = Input(normals._spec().input_pin(3), 3, op, -1) + self._field = Input(normals._spec().input_pin(3), 3, op, -1) self._inputs.append(self._field) @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.normals() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : Scoping, + my_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.normals() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. 
Parameters ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.normals() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field + class OutputsNormals(_Outputs): - """Intermediate class used to get outputs from normals operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.normals() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + normals operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.normals() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(normals._spec().outputs, op) - self._field = Output(normals._spec().output_pin(0), 0, op) + self._field = Output(normals._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.normals() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/geo/normals_provider_nl.py b/ansys/dpf/core/operators/geo/normals_provider_nl.py index 99ec338b31a..027c8b681b2 100644 --- a/ansys/dpf/core/operators/geo/normals_provider_nl.py +++ b/ansys/dpf/core/operators/geo/normals_provider_nl.py @@ -1,66 +1,131 @@ """ normals_provider_nl -=================== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "geo" category -""" class normals_provider_nl(Operator): - """Compute the normals on nodes/elements based on integration points(more accurate for non-linear elements), on a skin mesh - - available inputs: - - mesh (MeshedRegion) - - mesh_scoping (Scoping) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.geo.normals_provider_nl() - - >>> # Make input connections - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.geo.normals_provider_nl(mesh=my_mesh,mesh_scoping=my_mesh_scoping) + """Compute the normals on nodes/elements based on integration points(more + accurate for non-linear elements), on a skin mesh + + Parameters + ---------- + mesh : MeshedRegion + Skin or shell mesh region + mesh_scoping : Scoping, optional + Elemental, elementalnodal, or nodal scoping. + location derived from this. + requested_location : str, optional + If no scoping, specifies location. if scoping + is elemental or elementalnodal this + overrides scoping. default elemental. 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.geo.normals_provider_nl() + + >>> # Make input connections + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.geo.normals_provider_nl( + ... mesh=my_mesh, + ... mesh_scoping=my_mesh_scoping, + ... requested_location=my_requested_location, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" - def __init__(self, mesh=None, mesh_scoping=None, config=None, server=None): - super().__init__(name="normals_provider_nl", config = config, server = server) + def __init__( + self, + mesh=None, + mesh_scoping=None, + requested_location=None, + config=None, + server=None, + ): + super().__init__(name="normals_provider_nl", config=config, server=server) self._inputs = InputsNormalsProviderNl(self) self._outputs = OutputsNormalsProviderNl(self) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) @staticmethod def _spec(): - spec = Specification(description="""Compute the normals on nodes/elements based on integration points(more accurate for non-linear elements), on a skin mesh""", - map_input_pin_spec={ - 0 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=False, document="""skin or shell mesh region"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scoping"], optional=False, document="""""")}, - 
map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Compute the normals on nodes/elements based on integration points(more + accurate for non-linear elements), on a skin mesh""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document="""Skin or shell mesh region""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=True, + document="""Elemental, elementalnodal, or nodal scoping. + location derived from this.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""If no scoping, specifies location. if scoping + is elemental or elementalnodal this + overrides scoping. default elemental.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "normals_provider_nl") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="normals_provider_nl", server=server) @property def inputs(self): @@ -68,117 +133,142 @@ def inputs(self): Returns -------- - inputs : InputsNormalsProviderNl + inputs : InputsNormalsProviderNl """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNormalsProviderNl + outputs : OutputsNormalsProviderNl """ return super().outputs -#internal name: normals_provider_nl -#scripting name: normals_provider_nl class InputsNormalsProviderNl(_Inputs): - """Intermediate class used to connect user inputs to normals_provider_nl operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.normals_provider_nl() - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + """Intermediate class used to connect user inputs to + normals_provider_nl operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.normals_provider_nl() + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) """ + def __init__(self, op: Operator): super().__init__(normals_provider_nl._spec().inputs, op) - self._mesh = Input(normals_provider_nl._spec().input_pin(0), 0, op, -1) + self._mesh = Input(normals_provider_nl._spec().input_pin(0), 0, op, -1) self._inputs.append(self._mesh) - self._mesh_scoping = Input(normals_provider_nl._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(normals_provider_nl._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) + self._requested_location = Input( + normals_provider_nl._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: skin or shell mesh region + Skin or shell mesh region Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.normals_provider_nl() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. + + Elemental, elementalnodal, or nodal scoping. + location derived from this. 
Parameters ---------- - my_mesh_scoping : Scoping, + my_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.normals_provider_nl() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + If no scoping, specifies location. if scoping + is elemental or elementalnodal this + overrides scoping. default elemental. + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.normals_provider_nl() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + class OutputsNormalsProviderNl(_Outputs): - """Intermediate class used to get outputs from normals_provider_nl operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.normals_provider_nl() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + normals_provider_nl operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.normals_provider_nl() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(normals_provider_nl._spec().outputs, op) - self._field = Output(normals_provider_nl._spec().output_pin(0), 0, op) + self._field = Output(normals_provider_nl._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.normals_provider_nl() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/geo/rotate.py b/ansys/dpf/core/operators/geo/rotate.py index eaad232dd9a..c58854e120c 100644 --- a/ansys/dpf/core/operators/geo/rotate.py +++ b/ansys/dpf/core/operators/geo/rotate.py @@ -1,66 +1,107 @@ """ rotate -====== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "geo" category -""" class rotate(Operator): """Apply a transformation (rotation) matrix on field. 
- available inputs: - - field (Field, FieldsContainer) - - field_rotation_matrix (Field) + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + field_rotation_matrix : Field + 3-3 rotation matrix + - available outputs: - - field (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.geo.rotate() - >>> # Instantiate operator - >>> op = dpf.operators.geo.rotate() + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_field_rotation_matrix = dpf.Field() + >>> op.inputs.field_rotation_matrix.connect(my_field_rotation_matrix) - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_field_rotation_matrix = dpf.Field() - >>> op.inputs.field_rotation_matrix.connect(my_field_rotation_matrix) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.geo.rotate( + ... field=my_field, + ... field_rotation_matrix=my_field_rotation_matrix, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.geo.rotate(field=my_field,field_rotation_matrix=my_field_rotation_matrix) + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" - def __init__(self, field=None, field_rotation_matrix=None, config=None, server=None): - super().__init__(name="rotate", config = config, server = server) + def __init__( + self, field=None, field_rotation_matrix=None, config=None, server=None + ): + super().__init__(name="rotate", config=config, server=server) self._inputs = InputsRotate(self) self._outputs = OutputsRotate(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if field_rotation_matrix !=None: + if field_rotation_matrix is not None: self.inputs.field_rotation_matrix.connect(field_rotation_matrix) @staticmethod def _spec(): - spec = Specification(description="""Apply a transformation (rotation) matrix on field.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "field_rotation_matrix", type_names=["field"], optional=False, document="""3-3 rotation matrix""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Apply a transformation (rotation) matrix on field.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="field_rotation_matrix", + type_names=["field"], + optional=False, + document="""3-3 rotation matrix""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + 
name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "rotate") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="rotate", server=server) @property def inputs(self): @@ -68,119 +109,114 @@ def inputs(self): Returns -------- - inputs : InputsRotate + inputs : InputsRotate """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsRotate + outputs : OutputsRotate """ return super().outputs -#internal name: rotate -#scripting name: rotate class InputsRotate(_Inputs): - """Intermediate class used to connect user inputs to rotate operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.rotate() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_field_rotation_matrix = dpf.Field() - >>> op.inputs.field_rotation_matrix.connect(my_field_rotation_matrix) + """Intermediate class used to connect user inputs to + rotate operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.rotate() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_field_rotation_matrix = dpf.Field() + >>> op.inputs.field_rotation_matrix.connect(my_field_rotation_matrix) """ + def __init__(self, op: Operator): super().__init__(rotate._spec().inputs, op) - self._field = Input(rotate._spec().input_pin(0), 0, op, -1) + self._field = Input(rotate._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._field_rotation_matrix = Input(rotate._spec().input_pin(1), 1, op, -1) + self._field_rotation_matrix = Input(rotate._spec().input_pin(1), 1, op, -1) self._inputs.append(self._field_rotation_matrix) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.rotate() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def field_rotation_matrix(self): - """Allows to connect field_rotation_matrix input to the operator + """Allows to connect field_rotation_matrix input to the operator. 
- - pindoc: 3-3 rotation matrix + 3-3 rotation matrix Parameters ---------- - my_field_rotation_matrix : Field, + my_field_rotation_matrix : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.rotate() >>> op.inputs.field_rotation_matrix.connect(my_field_rotation_matrix) - >>> #or + >>> # or >>> op.inputs.field_rotation_matrix(my_field_rotation_matrix) - """ return self._field_rotation_matrix + class OutputsRotate(_Outputs): - """Intermediate class used to get outputs from rotate operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.rotate() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + rotate operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.rotate() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(rotate._spec().outputs, op) - self._field = Output(rotate._spec().output_pin(0), 0, op) + self._field = Output(rotate._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.rotate() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/geo/rotate_fc.py b/ansys/dpf/core/operators/geo/rotate_fc.py index 32eea0d7cf4..79d317cff29 100644 --- a/ansys/dpf/core/operators/geo/rotate_fc.py +++ b/ansys/dpf/core/operators/geo/rotate_fc.py @@ -1,66 +1,106 @@ """ rotate_fc -========= +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "geo" category -""" class rotate_fc(Operator): - """Apply a transformation (rotation) matrix on all the fields of a fields container. + """Apply a transformation (rotation) matrix on all the fields of a fields + container. + + Parameters + ---------- + fields_container : FieldsContainer + coordinate_system : Field + 3-3 rotation matrix - available inputs: - - fields_container (FieldsContainer) - - coordinate_system (Field) - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.geo.rotate_fc() - >>> # Instantiate operator - >>> op = dpf.operators.geo.rotate_fc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_coordinate_system = dpf.Field() + >>> op.inputs.coordinate_system.connect(my_coordinate_system) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_coordinate_system = dpf.Field() - >>> op.inputs.coordinate_system.connect(my_coordinate_system) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.geo.rotate_fc( + ... fields_container=my_fields_container, + ... coordinate_system=my_coordinate_system, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.geo.rotate_fc(fields_container=my_fields_container,coordinate_system=my_coordinate_system) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, coordinate_system=None, config=None, server=None): - super().__init__(name="rotate_fc", config = config, server = server) + def __init__( + self, fields_container=None, coordinate_system=None, config=None, server=None + ): + super().__init__(name="rotate_fc", config=config, server=server) self._inputs = InputsRotateFc(self) self._outputs = OutputsRotateFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if coordinate_system !=None: + if coordinate_system is not None: self.inputs.coordinate_system.connect(coordinate_system) @staticmethod def _spec(): - spec = Specification(description="""Apply a transformation (rotation) matrix on all the fields of a fields container.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "coordinate_system", type_names=["field"], optional=False, document="""3-3 rotation matrix""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Apply a transformation (rotation) matrix on all the fields of a fields + container.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="coordinate_system", + type_names=["field"], + optional=False, + document="""3-3 rotation 
matrix""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "rotate_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="rotate_fc", server=server) @property def inputs(self): @@ -68,117 +108,111 @@ def inputs(self): Returns -------- - inputs : InputsRotateFc + inputs : InputsRotateFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsRotateFc + outputs : OutputsRotateFc """ return super().outputs -#internal name: rotate_fc -#scripting name: rotate_fc class InputsRotateFc(_Inputs): - """Intermediate class used to connect user inputs to rotate_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.rotate_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_coordinate_system = dpf.Field() - >>> op.inputs.coordinate_system.connect(my_coordinate_system) + """Intermediate class used to connect user inputs to + rotate_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.rotate_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_coordinate_system = dpf.Field() + >>> op.inputs.coordinate_system.connect(my_coordinate_system) """ + def __init__(self, op: Operator): super().__init__(rotate_fc._spec().inputs, op) - self._fields_container = Input(rotate_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(rotate_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) - self._coordinate_system = Input(rotate_fc._spec().input_pin(1), 1, op, -1) + self._coordinate_system = Input(rotate_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._coordinate_system) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.rotate_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def coordinate_system(self): - """Allows to connect coordinate_system input to the operator + """Allows to connect coordinate_system input to the operator. 
- - pindoc: 3-3 rotation matrix + 3-3 rotation matrix Parameters ---------- - my_coordinate_system : Field, + my_coordinate_system : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.rotate_fc() >>> op.inputs.coordinate_system.connect(my_coordinate_system) - >>> #or + >>> # or >>> op.inputs.coordinate_system(my_coordinate_system) - """ return self._coordinate_system + class OutputsRotateFc(_Outputs): - """Intermediate class used to get outputs from rotate_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.rotate_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + rotate_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.rotate_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(rotate_fc._spec().outputs, op) - self._fields_container = Output(rotate_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(rotate_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.rotate_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/geo/rotate_in_cylindrical_cs.py b/ansys/dpf/core/operators/geo/rotate_in_cylindrical_cs.py index 2f5d045dcfc..487fc618ada 100644 --- a/ansys/dpf/core/operators/geo/rotate_in_cylindrical_cs.py +++ b/ansys/dpf/core/operators/geo/rotate_in_cylindrical_cs.py @@ -1,66 +1,117 @@ """ rotate_in_cylindrical_cs -======================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "geo" category -""" class rotate_in_cylindrical_cs(Operator): - """Rotate a field to its corresponding values into the specified cylindrical coordinate system (corresponding to the field position). If no coordinate system is set in the coordinate_system pin, field is rotated on each node following the local polar coordinate system. 
- - available inputs: - - field (Field, FieldsContainer) - - coordinate_system (Field) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.geo.rotate_in_cylindrical_cs() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_coordinate_system = dpf.Field() - >>> op.inputs.coordinate_system.connect(my_coordinate_system) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.geo.rotate_in_cylindrical_cs(field=my_field,coordinate_system=my_coordinate_system) + """Rotate a field to its corresponding values into the specified + cylindrical coordinate system (corresponding to the field + position). If no coordinate system is set in the coordinate_system + pin, field is rotated on each node following the local polar + coordinate system. + + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + coordinate_system : Field, optional + 3-3 rotation matrix and origin coordinates + must be set here to define a + coordinate system. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.geo.rotate_in_cylindrical_cs() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_coordinate_system = dpf.Field() + >>> op.inputs.coordinate_system.connect(my_coordinate_system) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.geo.rotate_in_cylindrical_cs( + ... field=my_field, + ... coordinate_system=my_coordinate_system, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, field=None, coordinate_system=None, config=None, server=None): - super().__init__(name="transform_cylindricalCS", config = config, server = server) + super().__init__(name="transform_cylindricalCS", config=config, server=server) self._inputs = InputsRotateInCylindricalCs(self) self._outputs = OutputsRotateInCylindricalCs(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if coordinate_system !=None: + if coordinate_system is not None: self.inputs.coordinate_system.connect(coordinate_system) @staticmethod def _spec(): - spec = Specification(description="""Rotate a field to its corresponding values into the specified cylindrical coordinate system (corresponding to the field position). If no coordinate system is set in the coordinate_system pin, field is rotated on each node following the local polar coordinate system.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "coordinate_system", type_names=["field"], optional=True, document="""3-3 rotation matrix and origin coordinates must be set here to define a coordinate system.""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Rotate a field to its corresponding values into the specified + cylindrical coordinate system (corresponding to the field + position). 
If no coordinate system is set in the + coordinate_system pin, field is rotated on each node + following the local polar coordinate system.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="coordinate_system", + type_names=["field"], + optional=True, + document="""3-3 rotation matrix and origin coordinates + must be set here to define a + coordinate system.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "transform_cylindricalCS") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="transform_cylindricalCS", server=server) @property def inputs(self): @@ -68,119 +119,120 @@ def inputs(self): Returns -------- - inputs : InputsRotateInCylindricalCs + inputs : InputsRotateInCylindricalCs """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsRotateInCylindricalCs + outputs : OutputsRotateInCylindricalCs """ return super().outputs -#internal name: transform_cylindricalCS -#scripting name: rotate_in_cylindrical_cs class InputsRotateInCylindricalCs(_Inputs): - """Intermediate class used to connect user inputs to rotate_in_cylindrical_cs operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.rotate_in_cylindrical_cs() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_coordinate_system = dpf.Field() - >>> op.inputs.coordinate_system.connect(my_coordinate_system) + """Intermediate class used to connect user inputs to + rotate_in_cylindrical_cs operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.rotate_in_cylindrical_cs() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_coordinate_system = dpf.Field() + >>> op.inputs.coordinate_system.connect(my_coordinate_system) """ + def __init__(self, op: Operator): super().__init__(rotate_in_cylindrical_cs._spec().inputs, op) - self._field = Input(rotate_in_cylindrical_cs._spec().input_pin(0), 0, op, -1) + self._field = Input(rotate_in_cylindrical_cs._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._coordinate_system = Input(rotate_in_cylindrical_cs._spec().input_pin(1), 1, op, -1) + self._coordinate_system = Input( + rotate_in_cylindrical_cs._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._coordinate_system) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.rotate_in_cylindrical_cs() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def coordinate_system(self): - """Allows to connect coordinate_system input to the operator + """Allows to connect coordinate_system input to the operator. - - pindoc: 3-3 rotation matrix and origin coordinates must be set here to define a coordinate system. + 3-3 rotation matrix and origin coordinates + must be set here to define a + coordinate system. 
Parameters ---------- - my_coordinate_system : Field, + my_coordinate_system : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.rotate_in_cylindrical_cs() >>> op.inputs.coordinate_system.connect(my_coordinate_system) - >>> #or + >>> # or >>> op.inputs.coordinate_system(my_coordinate_system) - """ return self._coordinate_system + class OutputsRotateInCylindricalCs(_Outputs): - """Intermediate class used to get outputs from rotate_in_cylindrical_cs operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.rotate_in_cylindrical_cs() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + rotate_in_cylindrical_cs operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.rotate_in_cylindrical_cs() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(rotate_in_cylindrical_cs._spec().outputs, op) - self._fields_container = Output(rotate_in_cylindrical_cs._spec().output_pin(0), 0, op) + self._fields_container = Output( + rotate_in_cylindrical_cs._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.rotate_in_cylindrical_cs() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/geo/rotate_in_cylindrical_cs_fc.py b/ansys/dpf/core/operators/geo/rotate_in_cylindrical_cs_fc.py index 6e96cfd0667..9b45918fd37 100644 --- a/ansys/dpf/core/operators/geo/rotate_in_cylindrical_cs_fc.py +++ b/ansys/dpf/core/operators/geo/rotate_in_cylindrical_cs_fc.py @@ -1,66 +1,121 @@ """ rotate_in_cylindrical_cs_fc -=========================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "geo" category -""" class rotate_in_cylindrical_cs_fc(Operator): - """Rotate all the fields of a fields container (not defined with a cynlindrical coordinate system) to its corresponding values into the specified cylindrical coordinate system (corresponding to the field position). If no coordinate system is set in the coordinate_system pin, field is rotated on each node following the local polar coordinate system. 
- - available inputs: - - field (Field, FieldsContainer) - - coordinate_system (Field) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.geo.rotate_in_cylindrical_cs_fc() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_coordinate_system = dpf.Field() - >>> op.inputs.coordinate_system.connect(my_coordinate_system) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.geo.rotate_in_cylindrical_cs_fc(field=my_field,coordinate_system=my_coordinate_system) + """Rotate all the fields of a fields container (not defined with a + cynlindrical coordinate system) to its corresponding values into + the specified cylindrical coordinate system (corresponding to the + field position). If no coordinate system is set in the + coordinate_system pin, field is rotated on each node following the + local polar coordinate system. + + Parameters + ---------- + field : Field or FieldsContainer + coordinate_system : Field, optional + 3-3 rotation matrix and origin coordinates + must be set here to define a + coordinate system. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.geo.rotate_in_cylindrical_cs_fc() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_coordinate_system = dpf.Field() + >>> op.inputs.coordinate_system.connect(my_coordinate_system) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.geo.rotate_in_cylindrical_cs_fc( + ... field=my_field, + ... coordinate_system=my_coordinate_system, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, field=None, coordinate_system=None, config=None, server=None): - super().__init__(name="transform_cylindrical_cs_fc", config = config, server = server) + super().__init__( + name="transform_cylindrical_cs_fc", config=config, server=server + ) self._inputs = InputsRotateInCylindricalCsFc(self) self._outputs = OutputsRotateInCylindricalCsFc(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if coordinate_system !=None: + if coordinate_system is not None: self.inputs.coordinate_system.connect(coordinate_system) @staticmethod def _spec(): - spec = Specification(description="""Rotate all the fields of a fields container (not defined with a cynlindrical coordinate system) to its corresponding values into the specified cylindrical coordinate system (corresponding to the field position). If no coordinate system is set in the coordinate_system pin, field is rotated on each node following the local polar coordinate system.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "coordinate_system", type_names=["field"], optional=True, document="""3-3 rotation matrix and origin coordinates must be set here to define a coordinate system.""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Rotate all the fields of a fields container (not defined with a + cynlindrical coordinate system) to its corresponding + values into the specified cylindrical coordinate system + (corresponding to the field position). 
If no coordinate + system is set in the coordinate_system pin, field is + rotated on each node following the local polar coordinate + system.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="coordinate_system", + type_names=["field"], + optional=True, + document="""3-3 rotation matrix and origin coordinates + must be set here to define a + coordinate system.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "transform_cylindrical_cs_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config( + name="transform_cylindrical_cs_fc", server=server + ) @property def inputs(self): @@ -68,117 +123,117 @@ def inputs(self): Returns -------- - inputs : InputsRotateInCylindricalCsFc + inputs : InputsRotateInCylindricalCsFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsRotateInCylindricalCsFc + outputs : OutputsRotateInCylindricalCsFc """ return super().outputs -#internal name: transform_cylindrical_cs_fc -#scripting name: rotate_in_cylindrical_cs_fc class InputsRotateInCylindricalCsFc(_Inputs): - """Intermediate class used to connect user inputs to rotate_in_cylindrical_cs_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.rotate_in_cylindrical_cs_fc() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_coordinate_system = dpf.Field() - >>> op.inputs.coordinate_system.connect(my_coordinate_system) + """Intermediate class used to connect user inputs to + rotate_in_cylindrical_cs_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.rotate_in_cylindrical_cs_fc() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_coordinate_system = dpf.Field() + >>> op.inputs.coordinate_system.connect(my_coordinate_system) """ + def __init__(self, op: Operator): super().__init__(rotate_in_cylindrical_cs_fc._spec().inputs, op) - self._field = Input(rotate_in_cylindrical_cs_fc._spec().input_pin(0), 0, op, -1) + self._field = Input(rotate_in_cylindrical_cs_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._coordinate_system = Input(rotate_in_cylindrical_cs_fc._spec().input_pin(1), 1, op, -1) + self._coordinate_system = Input( + rotate_in_cylindrical_cs_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._coordinate_system) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.rotate_in_cylindrical_cs_fc() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def coordinate_system(self): - """Allows to connect coordinate_system input to the operator + """Allows to connect coordinate_system input to the operator. - - pindoc: 3-3 rotation matrix and origin coordinates must be set here to define a coordinate system. + 3-3 rotation matrix and origin coordinates + must be set here to define a + coordinate system. 
Parameters ---------- - my_coordinate_system : Field, + my_coordinate_system : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.rotate_in_cylindrical_cs_fc() >>> op.inputs.coordinate_system.connect(my_coordinate_system) - >>> #or + >>> # or >>> op.inputs.coordinate_system(my_coordinate_system) - """ return self._coordinate_system + class OutputsRotateInCylindricalCsFc(_Outputs): - """Intermediate class used to get outputs from rotate_in_cylindrical_cs_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.rotate_in_cylindrical_cs_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + rotate_in_cylindrical_cs_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.rotate_in_cylindrical_cs_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(rotate_in_cylindrical_cs_fc._spec().outputs, op) - self._fields_container = Output(rotate_in_cylindrical_cs_fc._spec().output_pin(0), 0, op) + self._fields_container = Output( + rotate_in_cylindrical_cs_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.rotate_in_cylindrical_cs_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/geo/to_polar_coordinates.py b/ansys/dpf/core/operators/geo/to_polar_coordinates.py index 5c4f48a7805..c20da1e0d4c 100644 --- a/ansys/dpf/core/operators/geo/to_polar_coordinates.py +++ b/ansys/dpf/core/operators/geo/to_polar_coordinates.py @@ -1,66 +1,118 @@ """ to_polar_coordinates -==================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "geo" category -""" class to_polar_coordinates(Operator): - """Find r, theta (rad), z coordinates of a coordinates (nodal) field in cartesian coordinates system with respoect to the input coordinate system defining the rotation axis and the origin. 
- - available inputs: - - field (Field, FieldsContainer) - - coordinate_system (Field) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.geo.to_polar_coordinates() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_coordinate_system = dpf.Field() - >>> op.inputs.coordinate_system.connect(my_coordinate_system) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.geo.to_polar_coordinates(field=my_field,coordinate_system=my_coordinate_system) + """Find r, theta (rad), z coordinates of a coordinates (nodal) field in + cartesian coordinates system with respoect to the input coordinate + system defining the rotation axis and the origin. + + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + coordinate_system : Field, optional + 3-3 rotation matrix and origin coordinates + must be set here to define a + coordinate system. by default, the + rotation axis is the z axis and the + origin is [0,0,0] + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.geo.to_polar_coordinates() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_coordinate_system = dpf.Field() + >>> op.inputs.coordinate_system.connect(my_coordinate_system) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.geo.to_polar_coordinates( + ... field=my_field, + ... coordinate_system=my_coordinate_system, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, field=None, coordinate_system=None, config=None, server=None): - super().__init__(name="polar_coordinates", config = config, server = server) + super().__init__(name="polar_coordinates", config=config, server=server) self._inputs = InputsToPolarCoordinates(self) self._outputs = OutputsToPolarCoordinates(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if coordinate_system !=None: + if coordinate_system is not None: self.inputs.coordinate_system.connect(coordinate_system) @staticmethod def _spec(): - spec = Specification(description="""Find r, theta (rad), z coordinates of a coordinates (nodal) field in cartesian coordinates system with respoect to the input coordinate system defining the rotation axis and the origin.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "coordinate_system", type_names=["field"], optional=True, document="""3-3 rotation matrix and origin coordinates must be set here to define a coordinate system. 
By default, the rotation axis is the z axis and the origin is [0,0,0]""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Find r, theta (rad), z coordinates of a coordinates (nodal) field in + cartesian coordinates system with respoect to the input + coordinate system defining the rotation axis and the + origin.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="coordinate_system", + type_names=["field"], + optional=True, + document="""3-3 rotation matrix and origin coordinates + must be set here to define a + coordinate system. by default, the + rotation axis is the z axis and the + origin is [0,0,0]""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "polar_coordinates") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="polar_coordinates", server=server) @property def inputs(self): @@ -68,119 +120,120 @@ def inputs(self): Returns -------- - inputs : InputsToPolarCoordinates + inputs : InputsToPolarCoordinates """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsToPolarCoordinates + outputs : OutputsToPolarCoordinates """ return super().outputs -#internal name: polar_coordinates -#scripting name: to_polar_coordinates class InputsToPolarCoordinates(_Inputs): - """Intermediate class used to connect user inputs to to_polar_coordinates operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.to_polar_coordinates() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_coordinate_system = dpf.Field() - >>> op.inputs.coordinate_system.connect(my_coordinate_system) + """Intermediate class used to connect user inputs to + to_polar_coordinates operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.to_polar_coordinates() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_coordinate_system = dpf.Field() + >>> op.inputs.coordinate_system.connect(my_coordinate_system) """ + def __init__(self, op: Operator): super().__init__(to_polar_coordinates._spec().inputs, op) - self._field = Input(to_polar_coordinates._spec().input_pin(0), 0, op, -1) + self._field = Input(to_polar_coordinates._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._coordinate_system = Input(to_polar_coordinates._spec().input_pin(1), 1, op, -1) + self._coordinate_system = Input( + to_polar_coordinates._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._coordinate_system) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.to_polar_coordinates() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def coordinate_system(self): - """Allows to connect coordinate_system input to the operator + """Allows to connect coordinate_system input to the operator. - - pindoc: 3-3 rotation matrix and origin coordinates must be set here to define a coordinate system. By default, the rotation axis is the z axis and the origin is [0,0,0] + 3-3 rotation matrix and origin coordinates + must be set here to define a + coordinate system. by default, the + rotation axis is the z axis and the + origin is [0,0,0] Parameters ---------- - my_coordinate_system : Field, + my_coordinate_system : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.to_polar_coordinates() >>> op.inputs.coordinate_system.connect(my_coordinate_system) - >>> #or + >>> # or >>> op.inputs.coordinate_system(my_coordinate_system) - """ return self._coordinate_system + class OutputsToPolarCoordinates(_Outputs): - """Intermediate class used to get outputs from to_polar_coordinates operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.geo.to_polar_coordinates() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + to_polar_coordinates operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.geo.to_polar_coordinates() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(to_polar_coordinates._spec().outputs, op) - self._fields_container = Output(to_polar_coordinates._spec().output_pin(0), 0, op) - self._outputs.append(self._fields_container) + self._field = Output(to_polar_coordinates._spec().output_pin(0), 0, op) + self._outputs.append(self._field) @property - def fields_container(self): - """Allows to get fields_container output of the operator - + def field(self): + """Allows to get field output of the operator Returns ---------- - my_fields_container : FieldsContainer, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.to_polar_coordinates() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ - return self._fields_container - + >>> result_field = op.outputs.field() + """ # noqa: E501 + return self._field diff --git a/ansys/dpf/core/operators/invariant/__init__.py b/ansys/dpf/core/operators/invariant/__init__.py index 09c3f368b76..2a893502be3 100644 --- a/ansys/dpf/core/operators/invariant/__init__.py +++ b/ansys/dpf/core/operators/invariant/__init__.py @@ -1,7 +1,9 @@ from .eigen_values import eigen_values from .principal_invariants import principal_invariants from .von_mises_eqv import von_mises_eqv +from .segalman_von_mises_eqv import segalman_von_mises_eqv from .von_mises_eqv_fc import von_mises_eqv_fc +from .segalman_von_mises_eqv_fc import segalman_von_mises_eqv_fc from .invariants import invariants from .eigen_values_fc import eigen_values_fc from .invariants_fc import invariants_fc diff --git a/ansys/dpf/core/operators/invariant/eigen_values.py b/ansys/dpf/core/operators/invariant/eigen_values.py index 2db6de149be..f4c0c7b3a8b 100644 --- a/ansys/dpf/core/operators/invariant/eigen_values.py +++ b/ansys/dpf/core/operators/invariant/eigen_values.py @@ -1,60 +1,92 @@ """ eigen_values -============ +=============== 
+Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "invariant" category -""" class eigen_values(Operator): """Computes the element-wise eigen values of a tensor field. - available inputs: - - field (Field, FieldsContainer) + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + - available outputs: - - field (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.invariant.eigen_values() - >>> # Instantiate operator - >>> op = dpf.operators.invariant.eigen_values() + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.invariant.eigen_values( + ... field=my_field, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.invariant.eigen_values(field=my_field) + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, field=None, config=None, server=None): - super().__init__(name="eig_values", config = config, server = server) + super().__init__(name="eig_values", config=config, server=server) self._inputs = InputsEigenValues(self) self._outputs = OutputsEigenValues(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) @staticmethod def _spec(): - spec = Specification(description="""Computes the element-wise eigen values of a tensor field.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Computes the element-wise eigen values of a tensor field.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "eig_values") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the global server. + """ + return Operator.default_config(name="eig_values", server=server) @property def inputs(self): @@ -62,93 +94,90 @@ def inputs(self): Returns -------- - inputs : InputsEigenValues + inputs : InputsEigenValues """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsEigenValues + outputs : OutputsEigenValues """ return super().outputs -#internal name: eig_values -#scripting name: eigen_values class InputsEigenValues(_Inputs): - """Intermediate class used to connect user inputs to eigen_values operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.invariant.eigen_values() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + """Intermediate class used to connect user inputs to + eigen_values operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.eigen_values() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) """ + def __init__(self, op: Operator): super().__init__(eigen_values._spec().inputs, op) - self._field = Input(eigen_values._spec().input_pin(0), 0, op, -1) + self._field = Input(eigen_values._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.eigen_values() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field + class OutputsEigenValues(_Outputs): - """Intermediate class used to get outputs from eigen_values operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.invariant.eigen_values() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + eigen_values operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.eigen_values() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(eigen_values._spec().outputs, op) - self._field = Output(eigen_values._spec().output_pin(0), 0, op) + self._field = Output(eigen_values._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.eigen_values() >>> # Connect inputs : op.inputs. ... 
- >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/invariant/eigen_values_fc.py b/ansys/dpf/core/operators/invariant/eigen_values_fc.py index 66d3eee741a..ffa2598d06a 100644 --- a/ansys/dpf/core/operators/invariant/eigen_values_fc.py +++ b/ansys/dpf/core/operators/invariant/eigen_values_fc.py @@ -1,60 +1,91 @@ """ eigen_values_fc =============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "invariant" category -""" class eigen_values_fc(Operator): - """Computes the element-wise eigen values of all the tensor fields of a fields container. + """Computes the element-wise eigen values of all the tensor fields of a + fields container. 
+ + Parameters + ---------- + fields_container : FieldsContainer - available inputs: - - fields_container (FieldsContainer) - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.invariant.eigen_values_fc() - >>> # Instantiate operator - >>> op = dpf.operators.invariant.eigen_values_fc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.invariant.eigen_values_fc( + ... fields_container=my_fields_container, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.invariant.eigen_values_fc(fields_container=my_fields_container) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="eig_values_fc", config = config, server = server) + super().__init__(name="eig_values_fc", config=config, server=server) self._inputs = InputsEigenValuesFc(self) self._outputs = OutputsEigenValuesFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Computes the element-wise eigen values of all the tensor fields of a fields container.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : 
PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Computes the element-wise eigen values of all the tensor fields of a + fields container.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "eig_values_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="eig_values_fc", server=server) @property def inputs(self): @@ -62,91 +93,87 @@ def inputs(self): Returns -------- - inputs : InputsEigenValuesFc + inputs : InputsEigenValuesFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsEigenValuesFc + outputs : OutputsEigenValuesFc """ return super().outputs -#internal name: eig_values_fc -#scripting name: eigen_values_fc class InputsEigenValuesFc(_Inputs): - """Intermediate class used to connect user inputs to eigen_values_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.invariant.eigen_values_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + eigen_values_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.eigen_values_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(eigen_values_fc._spec().inputs, op) - self._fields_container = Input(eigen_values_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(eigen_values_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.eigen_values_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsEigenValuesFc(_Outputs): - """Intermediate class used to get outputs from eigen_values_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.invariant.eigen_values_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + eigen_values_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.eigen_values_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(eigen_values_fc._spec().outputs, op) - self._fields_container = Output(eigen_values_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(eigen_values_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.eigen_values_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/invariant/eigen_vectors.py b/ansys/dpf/core/operators/invariant/eigen_vectors.py index 75e50c2eaf5..4ae256918ec 100644 --- a/ansys/dpf/core/operators/invariant/eigen_vectors.py +++ b/ansys/dpf/core/operators/invariant/eigen_vectors.py @@ -1,60 +1,94 @@ """ eigen_vectors -============= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "invariant" category -""" class eigen_vectors(Operator): - """Computes the element-wise eigen vectors for each tensor in the fields of the field container + """Computes the element-wise eigen vectors for each tensor in the field + + Parameters + ---------- + field : FieldsContainer or Field + Field or fields container with only one field + is expected - available inputs: - - fields (FieldsContainer, Field) - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.invariant.eigen_vectors() - >>> # Instantiate operator - >>> op = dpf.operators.invariant.eigen_vectors() + >>> # Make input connections + >>> my_field = dpf.FieldsContainer() + >>> op.inputs.field.connect(my_field) - >>> # Make input connections - >>> my_fields = dpf.FieldsContainer() - >>> op.inputs.fields.connect(my_fields) + >>> # Instantiate operator and connect inputs in one line 
+ >>> op = dpf.operators.invariant.eigen_vectors( + ... field=my_field, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.invariant.eigen_vectors(fields=my_fields) + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields=None, config=None, server=None): - super().__init__(name="eig_vectors", config = config, server = server) + def __init__(self, field=None, config=None, server=None): + super().__init__(name="eig_vectors", config=config, server=server) self._inputs = InputsEigenVectors(self) self._outputs = OutputsEigenVectors(self) - if fields !=None: - self.inputs.fields.connect(fields) + if field is not None: + self.inputs.field.connect(field) @staticmethod def _spec(): - spec = Specification(description="""Computes the element-wise eigen vectors for each tensor in the fields of the field container""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields", type_names=["fields_container","field"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = ( + """Computes the element-wise eigen vectors for each tensor in the field""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["fields_container", "field"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "eig_vectors") + def default_config(server=None): + """Returns the default config of the operator. 
+ + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the global server. + """ + return Operator.default_config(name="eig_vectors", server=server) @property def inputs(self): @@ -62,91 +96,90 @@ def inputs(self): Returns -------- - inputs : InputsEigenVectors + inputs : InputsEigenVectors """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsEigenVectors + outputs : OutputsEigenVectors """ return super().outputs -#internal name: eig_vectors -#scripting name: eigen_vectors class InputsEigenVectors(_Inputs): - """Intermediate class used to connect user inputs to eigen_vectors operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.invariant.eigen_vectors() - >>> my_fields = dpf.FieldsContainer() - >>> op.inputs.fields.connect(my_fields) + """Intermediate class used to connect user inputs to + eigen_vectors operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.eigen_vectors() + >>> my_field = dpf.FieldsContainer() + >>> op.inputs.field.connect(my_field) """ + def __init__(self, op: Operator): super().__init__(eigen_vectors._spec().inputs, op) - self._fields = Input(eigen_vectors._spec().input_pin(0), 0, op, -1) - self._inputs.append(self._fields) + self._field = Input(eigen_vectors._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._field) @property - def fields(self): - """Allows to connect fields input to the operator + def field(self): + """Allows to connect field input to the operator. 
+ + Field or fields container with only one field + is expected Parameters ---------- - my_fields : FieldsContainer, Field, + my_field : FieldsContainer or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.eigen_vectors() - >>> op.inputs.fields.connect(my_fields) - >>> #or - >>> op.inputs.fields(my_fields) - + >>> op.inputs.field.connect(my_field) + >>> # or + >>> op.inputs.field(my_field) """ - return self._fields + return self._field + class OutputsEigenVectors(_Outputs): - """Intermediate class used to get outputs from eigen_vectors operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.invariant.eigen_vectors() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + eigen_vectors operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.eigen_vectors() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(eigen_vectors._spec().outputs, op) - self._fields_container = Output(eigen_vectors._spec().output_pin(0), 0, op) - self._outputs.append(self._fields_container) + self._field = Output(eigen_vectors._spec().output_pin(0), 0, op) + self._outputs.append(self._field) @property - def fields_container(self): - """Allows to get fields_container output of the operator - + def field(self): + """Allows to get field output of the operator Returns ---------- - my_fields_container : FieldsContainer, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.eigen_vectors() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ - return self._fields_container - + >>> result_field = op.outputs.field() + """ # noqa: E501 + return self._field diff --git a/ansys/dpf/core/operators/invariant/eigen_vectors_fc.py b/ansys/dpf/core/operators/invariant/eigen_vectors_fc.py index 368f8fea0f5..9aa2bca72a4 100644 --- a/ansys/dpf/core/operators/invariant/eigen_vectors_fc.py +++ b/ansys/dpf/core/operators/invariant/eigen_vectors_fc.py @@ -1,60 +1,91 @@ """ eigen_vectors_fc -================ +================ +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "invariant" category -""" class eigen_vectors_fc(Operator): - """Computes the element-wise eigen vectors for each tensor in the field + """Computes the element-wise eigen vectors for each tensor in the fields + of the field container + + Parameters + ---------- + fields_container : FieldsContainer or Field - available inputs: - - field (FieldsContainer, Field) - available outputs: - - field (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.invariant.eigen_vectors_fc() - >>> # Instantiate operator - >>> op = dpf.operators.invariant.eigen_vectors_fc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_field = dpf.FieldsContainer() - >>> op.inputs.field.connect(my_field) + >>> # Instantiate operator and connect inputs in one line + >>> op = 
dpf.operators.invariant.eigen_vectors_fc( + ... fields_container=my_fields_container, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.invariant.eigen_vectors_fc(field=my_field) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" - def __init__(self, field=None, config=None, server=None): - super().__init__(name="eig_vectors_fc", config = config, server = server) + def __init__(self, fields_container=None, config=None, server=None): + super().__init__(name="eig_vectors_fc", config=config, server=server) self._inputs = InputsEigenVectorsFc(self) self._outputs = OutputsEigenVectorsFc(self) - if field !=None: - self.inputs.field.connect(field) + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Computes the element-wise eigen vectors for each tensor in the field""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["fields_container","field"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Computes the element-wise eigen vectors for each tensor in the fields + of the field container""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container", "field"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "eig_vectors_fc") + def default_config(server=None): + """Returns the 
 default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the global server. + """ + return Operator.default_config(name="eig_vectors_fc", server=server) @property def inputs(self): @@ -62,93 +93,87 @@ def inputs(self): Returns -------- - inputs : InputsEigenVectorsFc + inputs : InputsEigenVectorsFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsEigenVectorsFc + outputs : OutputsEigenVectorsFc """ return super().outputs -#internal name: eig_vectors_fc -#scripting name: eigen_vectors_fc class InputsEigenVectorsFc(_Inputs): - """Intermediate class used to connect user inputs to eigen_vectors_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.invariant.eigen_vectors_fc() - >>> my_field = dpf.FieldsContainer() - >>> op.inputs.field.connect(my_field) + """Intermediate class used to connect user inputs to + eigen_vectors_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.eigen_vectors_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(eigen_vectors_fc._spec().inputs, op) - self._field = Input(eigen_vectors_fc._spec().input_pin(0), 0, op, -1) - self._inputs.append(self._field) + self._fields_container = Input(eigen_vectors_fc._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._fields_container) @property - def field(self): - """Allows to connect field input to the operator - - - pindoc: field or fields container with only one field is expected + def fields_container(self): + """Allows to connect fields_container input to the operator. Parameters ---------- - my_field : FieldsContainer, Field, + my_fields_container : FieldsContainer or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.eigen_vectors_fc() - >>> op.inputs.field.connect(my_field) - >>> #or - >>> op.inputs.field(my_field) - + >>> op.inputs.fields_container.connect(my_fields_container) + >>> # or + >>> op.inputs.fields_container(my_fields_container) """ - return self._field + return self._fields_container + class OutputsEigenVectorsFc(_Outputs): - """Intermediate class used to get outputs from eigen_vectors_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.invariant.eigen_vectors_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + eigen_vectors_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.eigen_vectors_fc() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(eigen_vectors_fc._spec().outputs, op) - self._field = Output(eigen_vectors_fc._spec().output_pin(0), 0, op) - self._outputs.append(self._field) + self._fields_container = Output(eigen_vectors_fc._spec().output_pin(0), 0, op) + self._outputs.append(self._fields_container) @property - def field(self): - """Allows to get field output of the operator - + def fields_container(self): + """Allows to get fields_container output of the operator Returns ---------- - my_field : Field, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.eigen_vectors_fc() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ - return self._field - + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/invariant/invariants.py b/ansys/dpf/core/operators/invariant/invariants.py index 35dc4544234..9bdc33143a5 100644 --- a/ansys/dpf/core/operators/invariant/invariants.py +++ b/ansys/dpf/core/operators/invariant/invariants.py @@ -1,66 +1,103 @@ """ invariants -========== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "invariant" category -""" class invariants(Operator): """Computes the element-wise invariants of a tensor field. 
- available inputs: - - field (Field) + Parameters + ---------- + field : Field + - available outputs: - - field_int (Field) - - field_eqv (Field) - - field_max_shear (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.invariant.invariants() - >>> # Instantiate operator - >>> op = dpf.operators.invariant.invariants() + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.invariant.invariants( + ... field=my_field, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.invariant.invariants(field=my_field) + >>> # Get output data + >>> result_field_int = op.outputs.field_int() + >>> result_field_eqv = op.outputs.field_eqv() + >>> result_field_max_shear = op.outputs.field_max_shear() + """ - >>> # Get output data - >>> result_field_int = op.outputs.field_int() - >>> result_field_eqv = op.outputs.field_eqv() - >>> result_field_max_shear = op.outputs.field_max_shear()""" def __init__(self, field=None, config=None, server=None): - super().__init__(name="invariants_deriv", config = config, server = server) + super().__init__(name="invariants_deriv", config=config, server=server) self._inputs = InputsInvariants(self) self._outputs = OutputsInvariants(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) @staticmethod def _spec(): - spec = Specification(description="""Computes the element-wise invariants of a tensor field.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field_int", type_names=["field"], optional=False, 
document="""stress intensity field"""), - 1 : PinSpecification(name = "field_eqv", type_names=["field"], optional=False, document="""stress equivalent intensity"""), - 2 : PinSpecification(name = "field_max_shear", type_names=["field"], optional=False, document="""max shear stress field""")}) + description = """Computes the element-wise invariants of a tensor field.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field_int", + type_names=["field"], + optional=False, + document="""Stress intensity field""", + ), + 1: PinSpecification( + name="field_eqv", + type_names=["field"], + optional=False, + document="""Stress equivalent intensity""", + ), + 2: PinSpecification( + name="field_max_shear", + type_names=["field"], + optional=False, + document="""Max shear stress field""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "invariants_deriv") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="invariants_deriv", server=server) @property def inputs(self): @@ -68,141 +105,127 @@ def inputs(self): Returns -------- - inputs : InputsInvariants + inputs : InputsInvariants """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsInvariants + outputs : OutputsInvariants """ return super().outputs -#internal name: invariants_deriv -#scripting name: invariants class InputsInvariants(_Inputs): - """Intermediate class used to connect user inputs to invariants operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.invariant.invariants() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + """Intermediate class used to connect user inputs to + invariants operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.invariants() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) """ + def __init__(self, op: Operator): super().__init__(invariants._spec().inputs, op) - self._field = Input(invariants._spec().input_pin(0), 0, op, -1) + self._field = Input(invariants._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. Parameters ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.invariants() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field + class OutputsInvariants(_Outputs): - """Intermediate class used to get outputs from invariants operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.invariant.invariants() - >>> # Connect inputs : op.inputs. ... 
- >>> result_field_int = op.outputs.field_int() - >>> result_field_eqv = op.outputs.field_eqv() - >>> result_field_max_shear = op.outputs.field_max_shear() + """Intermediate class used to get outputs from + invariants operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.invariants() + >>> # Connect inputs : op.inputs. ... + >>> result_field_int = op.outputs.field_int() + >>> result_field_eqv = op.outputs.field_eqv() + >>> result_field_max_shear = op.outputs.field_max_shear() """ + def __init__(self, op: Operator): super().__init__(invariants._spec().outputs, op) - self._field_int = Output(invariants._spec().output_pin(0), 0, op) + self._field_int = Output(invariants._spec().output_pin(0), 0, op) self._outputs.append(self._field_int) - self._field_eqv = Output(invariants._spec().output_pin(1), 1, op) + self._field_eqv = Output(invariants._spec().output_pin(1), 1, op) self._outputs.append(self._field_eqv) - self._field_max_shear = Output(invariants._spec().output_pin(2), 2, op) + self._field_max_shear = Output(invariants._spec().output_pin(2), 2, op) self._outputs.append(self._field_max_shear) @property def field_int(self): """Allows to get field_int output of the operator - - - pindoc: stress intensity field - Returns ---------- - my_field_int : Field, + my_field_int : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.invariants() >>> # Connect inputs : op.inputs. ... - >>> result_field_int = op.outputs.field_int() - """ + >>> result_field_int = op.outputs.field_int() + """ # noqa: E501 return self._field_int @property def field_eqv(self): """Allows to get field_eqv output of the operator - - - pindoc: stress equivalent intensity - Returns ---------- - my_field_eqv : Field, + my_field_eqv : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.invariants() >>> # Connect inputs : op.inputs. ... 
- >>> result_field_eqv = op.outputs.field_eqv() - """ + >>> result_field_eqv = op.outputs.field_eqv() + """ # noqa: E501 return self._field_eqv @property def field_max_shear(self): """Allows to get field_max_shear output of the operator - - - pindoc: max shear stress field - Returns ---------- - my_field_max_shear : Field, + my_field_max_shear : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.invariants() >>> # Connect inputs : op.inputs. ... - >>> result_field_max_shear = op.outputs.field_max_shear() - """ + >>> result_field_max_shear = op.outputs.field_max_shear() + """ # noqa: E501 return self._field_max_shear - diff --git a/ansys/dpf/core/operators/invariant/invariants_fc.py b/ansys/dpf/core/operators/invariant/invariants_fc.py index e313543c2de..2762f9b1e33 100644 --- a/ansys/dpf/core/operators/invariant/invariants_fc.py +++ b/ansys/dpf/core/operators/invariant/invariants_fc.py @@ -1,66 +1,105 @@ """ invariants_fc -============= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "invariant" category -""" class invariants_fc(Operator): - """Computes the element-wise invariants of all the tensor fields of a fields container. + """Computes the element-wise invariants of all the tensor fields of a + fields container. 
+ + Parameters + ---------- + fields_container : FieldsContainer - available inputs: - - fields_container (FieldsContainer) - available outputs: - - fields_int (FieldsContainer) - - fields_eqv (FieldsContainer) - - fields_max_shear (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.invariant.invariants_fc() - >>> # Instantiate operator - >>> op = dpf.operators.invariant.invariants_fc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.invariant.invariants_fc( + ... fields_container=my_fields_container, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.invariant.invariants_fc(fields_container=my_fields_container) + >>> # Get output data + >>> result_fields_int = op.outputs.fields_int() + >>> result_fields_eqv = op.outputs.fields_eqv() + >>> result_fields_max_shear = op.outputs.fields_max_shear() + """ - >>> # Get output data - >>> result_fields_int = op.outputs.fields_int() - >>> result_fields_eqv = op.outputs.fields_eqv() - >>> result_fields_max_shear = op.outputs.fields_max_shear()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="invariants_deriv_fc", config = config, server = server) + super().__init__(name="invariants_deriv_fc", config=config, server=server) self._inputs = InputsInvariantsFc(self) self._outputs = OutputsInvariantsFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = 
Specification(description="""Computes the element-wise invariants of all the tensor fields of a fields container.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_int", type_names=["fields_container"], optional=False, document="""stress intensity field"""), - 1 : PinSpecification(name = "fields_eqv", type_names=["fields_container"], optional=False, document="""stress equivalent intensity"""), - 2 : PinSpecification(name = "fields_max_shear", type_names=["fields_container"], optional=False, document="""max shear stress field""")}) + description = """Computes the element-wise invariants of all the tensor fields of a + fields container.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_int", + type_names=["fields_container"], + optional=False, + document="""Stress intensity field""", + ), + 1: PinSpecification( + name="fields_eqv", + type_names=["fields_container"], + optional=False, + document="""Stress equivalent intensity""", + ), + 2: PinSpecification( + name="fields_max_shear", + type_names=["fields_container"], + optional=False, + document="""Max shear stress field""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "invariants_deriv_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="invariants_deriv_fc", server=server) @property def inputs(self): @@ -68,141 +107,127 @@ def inputs(self): Returns -------- - inputs : InputsInvariantsFc + inputs : InputsInvariantsFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsInvariantsFc + outputs : OutputsInvariantsFc """ return super().outputs -#internal name: invariants_deriv_fc -#scripting name: invariants_fc class InputsInvariantsFc(_Inputs): - """Intermediate class used to connect user inputs to invariants_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.invariant.invariants_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + invariants_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.invariants_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(invariants_fc._spec().inputs, op) - self._fields_container = Input(invariants_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(invariants_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.invariants_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsInvariantsFc(_Outputs): - """Intermediate class used to get outputs from invariants_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.invariant.invariants_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_int = op.outputs.fields_int() - >>> result_fields_eqv = op.outputs.fields_eqv() - >>> result_fields_max_shear = op.outputs.fields_max_shear() + """Intermediate class used to get outputs from + invariants_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.invariants_fc() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_int = op.outputs.fields_int() + >>> result_fields_eqv = op.outputs.fields_eqv() + >>> result_fields_max_shear = op.outputs.fields_max_shear() """ + def __init__(self, op: Operator): super().__init__(invariants_fc._spec().outputs, op) - self._fields_int = Output(invariants_fc._spec().output_pin(0), 0, op) + self._fields_int = Output(invariants_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_int) - self._fields_eqv = Output(invariants_fc._spec().output_pin(1), 1, op) + self._fields_eqv = Output(invariants_fc._spec().output_pin(1), 1, op) self._outputs.append(self._fields_eqv) - self._fields_max_shear = Output(invariants_fc._spec().output_pin(2), 2, op) + self._fields_max_shear = Output(invariants_fc._spec().output_pin(2), 2, op) self._outputs.append(self._fields_max_shear) @property def fields_int(self): """Allows to get fields_int output of the operator - - - pindoc: stress intensity field - Returns ---------- - my_fields_int : FieldsContainer, + my_fields_int : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.invariants_fc() >>> # Connect inputs : op.inputs. ... - >>> result_fields_int = op.outputs.fields_int() - """ + >>> result_fields_int = op.outputs.fields_int() + """ # noqa: E501 return self._fields_int @property def fields_eqv(self): """Allows to get fields_eqv output of the operator - - - pindoc: stress equivalent intensity - Returns ---------- - my_fields_eqv : FieldsContainer, + my_fields_eqv : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.invariants_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_eqv = op.outputs.fields_eqv() - """ + >>> result_fields_eqv = op.outputs.fields_eqv() + """ # noqa: E501 return self._fields_eqv @property def fields_max_shear(self): """Allows to get fields_max_shear output of the operator - - - pindoc: max shear stress field - Returns ---------- - my_fields_max_shear : FieldsContainer, + my_fields_max_shear : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.invariants_fc() >>> # Connect inputs : op.inputs. ... - >>> result_fields_max_shear = op.outputs.fields_max_shear() - """ + >>> result_fields_max_shear = op.outputs.fields_max_shear() + """ # noqa: E501 return self._fields_max_shear - diff --git a/ansys/dpf/core/operators/invariant/principal_invariants.py b/ansys/dpf/core/operators/invariant/principal_invariants.py index 913bdd3f26b..fbba49da0bf 100644 --- a/ansys/dpf/core/operators/invariant/principal_invariants.py +++ b/ansys/dpf/core/operators/invariant/principal_invariants.py @@ -1,66 +1,103 @@ """ principal_invariants -==================== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "invariant" category -""" class principal_invariants(Operator): """Computes the element-wise eigen values of a tensor field - available inputs: - - field (Field) + Parameters + ---------- + field : Field + - available outputs: - - field_eig_1 (Field) - - field_eig_2 (Field) - - field_eig_3 (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.invariant.principal_invariants() - >>> # Instantiate operator - >>> op = dpf.operators.invariant.principal_invariants() + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.invariant.principal_invariants( + ... field=my_field, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.invariant.principal_invariants(field=my_field) + >>> # Get output data + >>> result_field_eig_1 = op.outputs.field_eig_1() + >>> result_field_eig_2 = op.outputs.field_eig_2() + >>> result_field_eig_3 = op.outputs.field_eig_3() + """ - >>> # Get output data - >>> result_field_eig_1 = op.outputs.field_eig_1() - >>> result_field_eig_2 = op.outputs.field_eig_2() - >>> result_field_eig_3 = op.outputs.field_eig_3()""" def __init__(self, field=None, config=None, server=None): - super().__init__(name="invariants", config = config, server = server) + super().__init__(name="invariants", config=config, server=server) self._inputs = InputsPrincipalInvariants(self) self._outputs = OutputsPrincipalInvariants(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) @staticmethod def _spec(): - spec = Specification(description="""Computes the element-wise eigen values of a tensor field""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field_eig_1", type_names=["field"], optional=False, document="""first eigen value field"""), - 1 : PinSpecification(name = "field_eig_2", type_names=["field"], optional=False, document="""second eigen value field"""), - 2 : PinSpecification(name = "field_eig_3", type_names=["field"], optional=False, document="""third eigen value field""")}) + description = """Computes the element-wise eigen values of a tensor field""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field_eig_1", + type_names=["field"], + optional=False, + document="""First eigen value field""", + ), + 1: PinSpecification( + name="field_eig_2", + 
type_names=["field"], + optional=False, + document="""Second eigen value field""", + ), + 2: PinSpecification( + name="field_eig_3", + type_names=["field"], + optional=False, + document="""Third eigen value field""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "invariants") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="invariants", server=server) @property def inputs(self): @@ -68,141 +105,127 @@ def inputs(self): Returns -------- - inputs : InputsPrincipalInvariants + inputs : InputsPrincipalInvariants """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPrincipalInvariants + outputs : OutputsPrincipalInvariants """ return super().outputs -#internal name: invariants -#scripting name: principal_invariants class InputsPrincipalInvariants(_Inputs): - """Intermediate class used to connect user inputs to principal_invariants operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.invariant.principal_invariants() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + """Intermediate class used to connect user inputs to + principal_invariants operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.principal_invariants() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) """ + def __init__(self, op: Operator): super().__init__(principal_invariants._spec().inputs, op) - self._field = Input(principal_invariants._spec().input_pin(0), 0, op, -1) + self._field = Input(principal_invariants._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. Parameters ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.principal_invariants() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field + class OutputsPrincipalInvariants(_Outputs): - """Intermediate class used to get outputs from principal_invariants operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.invariant.principal_invariants() - >>> # Connect inputs : op.inputs. ... - >>> result_field_eig_1 = op.outputs.field_eig_1() - >>> result_field_eig_2 = op.outputs.field_eig_2() - >>> result_field_eig_3 = op.outputs.field_eig_3() + """Intermediate class used to get outputs from + principal_invariants operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.principal_invariants() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field_eig_1 = op.outputs.field_eig_1() + >>> result_field_eig_2 = op.outputs.field_eig_2() + >>> result_field_eig_3 = op.outputs.field_eig_3() """ + def __init__(self, op: Operator): super().__init__(principal_invariants._spec().outputs, op) - self._field_eig_1 = Output(principal_invariants._spec().output_pin(0), 0, op) + self._field_eig_1 = Output(principal_invariants._spec().output_pin(0), 0, op) self._outputs.append(self._field_eig_1) - self._field_eig_2 = Output(principal_invariants._spec().output_pin(1), 1, op) + self._field_eig_2 = Output(principal_invariants._spec().output_pin(1), 1, op) self._outputs.append(self._field_eig_2) - self._field_eig_3 = Output(principal_invariants._spec().output_pin(2), 2, op) + self._field_eig_3 = Output(principal_invariants._spec().output_pin(2), 2, op) self._outputs.append(self._field_eig_3) @property def field_eig_1(self): """Allows to get field_eig_1 output of the operator - - - pindoc: first eigen value field - Returns ---------- - my_field_eig_1 : Field, + my_field_eig_1 : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.principal_invariants() >>> # Connect inputs : op.inputs. ... - >>> result_field_eig_1 = op.outputs.field_eig_1() - """ + >>> result_field_eig_1 = op.outputs.field_eig_1() + """ # noqa: E501 return self._field_eig_1 @property def field_eig_2(self): """Allows to get field_eig_2 output of the operator - - - pindoc: second eigen value field - Returns ---------- - my_field_eig_2 : Field, + my_field_eig_2 : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.principal_invariants() >>> # Connect inputs : op.inputs. ... 
- >>> result_field_eig_2 = op.outputs.field_eig_2() - """ + >>> result_field_eig_2 = op.outputs.field_eig_2() + """ # noqa: E501 return self._field_eig_2 @property def field_eig_3(self): """Allows to get field_eig_3 output of the operator - - - pindoc: third eigen value field - Returns ---------- - my_field_eig_3 : Field, + my_field_eig_3 : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.principal_invariants() >>> # Connect inputs : op.inputs. ... - >>> result_field_eig_3 = op.outputs.field_eig_3() - """ + >>> result_field_eig_3 = op.outputs.field_eig_3() + """ # noqa: E501 return self._field_eig_3 - diff --git a/ansys/dpf/core/operators/invariant/principal_invariants_fc.py b/ansys/dpf/core/operators/invariant/principal_invariants_fc.py index e569ece50c4..d4459d0b1f7 100644 --- a/ansys/dpf/core/operators/invariant/principal_invariants_fc.py +++ b/ansys/dpf/core/operators/invariant/principal_invariants_fc.py @@ -1,66 +1,105 @@ """ principal_invariants_fc -======================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "invariant" category -""" class principal_invariants_fc(Operator): - """Computes the element-wise eigen values of all the tensor fields of a fields container. + """Computes the element-wise eigen values of all the tensor fields of a + fields container. 
+ + Parameters + ---------- + fields_container : FieldsContainer - available inputs: - - fields_container (FieldsContainer) - available outputs: - - fields_eig_1 (FieldsContainer) - - fields_eig_2 (FieldsContainer) - - fields_eig_3 (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.invariant.principal_invariants_fc() - >>> # Instantiate operator - >>> op = dpf.operators.invariant.principal_invariants_fc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.invariant.principal_invariants_fc( + ... fields_container=my_fields_container, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.invariant.principal_invariants_fc(fields_container=my_fields_container) + >>> # Get output data + >>> result_fields_eig_1 = op.outputs.fields_eig_1() + >>> result_fields_eig_2 = op.outputs.fields_eig_2() + >>> result_fields_eig_3 = op.outputs.fields_eig_3() + """ - >>> # Get output data - >>> result_fields_eig_1 = op.outputs.fields_eig_1() - >>> result_fields_eig_2 = op.outputs.fields_eig_2() - >>> result_fields_eig_3 = op.outputs.fields_eig_3()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="invariants_fc", config = config, server = server) + super().__init__(name="invariants_fc", config=config, server=server) self._inputs = InputsPrincipalInvariantsFc(self) self._outputs = OutputsPrincipalInvariantsFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Computes the element-wise eigen values of all the tensor fields of a fields container.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_eig_1", type_names=["fields_container"], optional=False, document="""first eigen value fields"""), - 1 : PinSpecification(name = "fields_eig_2", type_names=["fields_container"], optional=False, document="""second eigen value fields"""), - 2 : PinSpecification(name = "fields_eig_3", type_names=["fields_container"], optional=False, document="""third eigen value fields""")}) + description = """Computes the element-wise eigen values of all the tensor fields of a + fields container.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + 
document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_eig_1", + type_names=["fields_container"], + optional=False, + document="""First eigen value fields""", + ), + 1: PinSpecification( + name="fields_eig_2", + type_names=["fields_container"], + optional=False, + document="""Second eigen value fields""", + ), + 2: PinSpecification( + name="fields_eig_3", + type_names=["fields_container"], + optional=False, + document="""Third eigen value fields""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "invariants_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="invariants_fc", server=server) @property def inputs(self): @@ -68,141 +107,135 @@ def inputs(self): Returns -------- - inputs : InputsPrincipalInvariantsFc + inputs : InputsPrincipalInvariantsFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPrincipalInvariantsFc + outputs : OutputsPrincipalInvariantsFc """ return super().outputs -#internal name: invariants_fc -#scripting name: principal_invariants_fc class InputsPrincipalInvariantsFc(_Inputs): - """Intermediate class used to connect user inputs to principal_invariants_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.invariant.principal_invariants_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + principal_invariants_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.principal_invariants_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(principal_invariants_fc._spec().inputs, op) - self._fields_container = Input(principal_invariants_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + principal_invariants_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.principal_invariants_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsPrincipalInvariantsFc(_Outputs): - """Intermediate class used to get outputs from principal_invariants_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.invariant.principal_invariants_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_eig_1 = op.outputs.fields_eig_1() - >>> result_fields_eig_2 = op.outputs.fields_eig_2() - >>> result_fields_eig_3 = op.outputs.fields_eig_3() + """Intermediate class used to get outputs from + principal_invariants_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.principal_invariants_fc() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_eig_1 = op.outputs.fields_eig_1() + >>> result_fields_eig_2 = op.outputs.fields_eig_2() + >>> result_fields_eig_3 = op.outputs.fields_eig_3() """ + def __init__(self, op: Operator): super().__init__(principal_invariants_fc._spec().outputs, op) - self._fields_eig_1 = Output(principal_invariants_fc._spec().output_pin(0), 0, op) + self._fields_eig_1 = Output( + principal_invariants_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_eig_1) - self._fields_eig_2 = Output(principal_invariants_fc._spec().output_pin(1), 1, op) + self._fields_eig_2 = Output( + principal_invariants_fc._spec().output_pin(1), 1, op + ) self._outputs.append(self._fields_eig_2) - self._fields_eig_3 = Output(principal_invariants_fc._spec().output_pin(2), 2, op) + self._fields_eig_3 = Output( + principal_invariants_fc._spec().output_pin(2), 2, op + ) self._outputs.append(self._fields_eig_3) @property def fields_eig_1(self): """Allows to get fields_eig_1 output of the operator - - - pindoc: first eigen value fields - Returns ---------- - my_fields_eig_1 : FieldsContainer, + my_fields_eig_1 : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.principal_invariants_fc() >>> # Connect inputs : op.inputs. ... - >>> result_fields_eig_1 = op.outputs.fields_eig_1() - """ + >>> result_fields_eig_1 = op.outputs.fields_eig_1() + """ # noqa: E501 return self._fields_eig_1 @property def fields_eig_2(self): """Allows to get fields_eig_2 output of the operator - - - pindoc: second eigen value fields - Returns ---------- - my_fields_eig_2 : FieldsContainer, + my_fields_eig_2 : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.principal_invariants_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_eig_2 = op.outputs.fields_eig_2() - """ + >>> result_fields_eig_2 = op.outputs.fields_eig_2() + """ # noqa: E501 return self._fields_eig_2 @property def fields_eig_3(self): """Allows to get fields_eig_3 output of the operator - - - pindoc: third eigen value fields - Returns ---------- - my_fields_eig_3 : FieldsContainer, + my_fields_eig_3 : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.principal_invariants_fc() >>> # Connect inputs : op.inputs. ... - >>> result_fields_eig_3 = op.outputs.fields_eig_3() - """ + >>> result_fields_eig_3 = op.outputs.fields_eig_3() + """ # noqa: E501 return self._fields_eig_3 - diff --git a/ansys/dpf/core/operators/invariant/segalman_von_mises_eqv.py b/ansys/dpf/core/operators/invariant/segalman_von_mises_eqv.py new file mode 100644 index 00000000000..827f805d7cd --- /dev/null +++ b/ansys/dpf/core/operators/invariant/segalman_von_mises_eqv.py @@ -0,0 +1,185 @@ +""" +segalman_von_mises_eqv +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class segalman_von_mises_eqv(Operator): + """Computes the element-wise Segalman Von-Mises criteria on a tensor + field. + + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.invariant.segalman_von_mises_eqv() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.invariant.segalman_von_mises_eqv( + ... field=my_field, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__(self, field=None, config=None, server=None): + super().__init__(name="segalmaneqv", config=config, server=server) + self._inputs = InputsSegalmanVonMisesEqv(self) + self._outputs = OutputsSegalmanVonMisesEqv(self) + if field is not None: + self.inputs.field.connect(field) + + @staticmethod + def _spec(): + description = """Computes the element-wise Segalman Von-Mises criteria on a tensor + field.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="segalmaneqv", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsSegalmanVonMisesEqv + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsSegalmanVonMisesEqv + """ + return super().outputs + + +class InputsSegalmanVonMisesEqv(_Inputs): + """Intermediate class used to connect user inputs to + segalman_von_mises_eqv operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.segalman_von_mises_eqv() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + """ + + def __init__(self, op: Operator): + super().__init__(segalman_von_mises_eqv._spec().inputs, op) + self._field = Input(segalman_von_mises_eqv._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._field) + + @property + def field(self): + """Allows to connect field input to the operator. + + Field or fields container with only one field + is expected + + Parameters + ---------- + my_field : Field or FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.segalman_von_mises_eqv() + >>> op.inputs.field.connect(my_field) + >>> # or + >>> op.inputs.field(my_field) + """ + return self._field + + +class OutputsSegalmanVonMisesEqv(_Outputs): + """Intermediate class used to get outputs from + segalman_von_mises_eqv operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.segalman_von_mises_eqv() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() + """ + + def __init__(self, op: Operator): + super().__init__(segalman_von_mises_eqv._spec().outputs, op) + self._field = Output(segalman_von_mises_eqv._spec().output_pin(0), 0, op) + self._outputs.append(self._field) + + @property + def field(self): + """Allows to get field output of the operator + + Returns + ---------- + my_field : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.segalman_von_mises_eqv() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() + """ # noqa: E501 + return self._field diff --git a/ansys/dpf/core/operators/invariant/segalman_von_mises_eqv_fc.py b/ansys/dpf/core/operators/invariant/segalman_von_mises_eqv_fc.py new file mode 100644 index 00000000000..bd38887b43d --- /dev/null +++ b/ansys/dpf/core/operators/invariant/segalman_von_mises_eqv_fc.py @@ -0,0 +1,183 @@ +""" +segalman_von_mises_eqv_fc +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class segalman_von_mises_eqv_fc(Operator): + """Computes the element-wise Segalman Von-Mises criteria on all the + tensor fields of a fields container. + + Parameters + ---------- + fields_container : FieldsContainer + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.invariant.segalman_von_mises_eqv_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.invariant.segalman_von_mises_eqv_fc( + ... fields_container=my_fields_container, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, fields_container=None, config=None, server=None): + super().__init__(name="segalmaneqv_fc", config=config, server=server) + self._inputs = InputsSegalmanVonMisesEqvFc(self) + self._outputs = OutputsSegalmanVonMisesEqvFc(self) + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + + @staticmethod + def _spec(): + description = """Computes the element-wise Segalman Von-Mises criteria on all the + tensor fields of a fields container.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="segalmaneqv_fc", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsSegalmanVonMisesEqvFc + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsSegalmanVonMisesEqvFc + """ + return super().outputs + + +class InputsSegalmanVonMisesEqvFc(_Inputs): + """Intermediate class used to connect user inputs to + segalman_von_mises_eqv_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.segalman_von_mises_eqv_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + """ + + def __init__(self, op: Operator): + super().__init__(segalman_von_mises_eqv_fc._spec().inputs, op) + self._fields_container = Input( + segalman_von_mises_eqv_fc._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to connect fields_container input to the operator. + + Parameters + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.segalman_von_mises_eqv_fc() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> # or + >>> op.inputs.fields_container(my_fields_container) + """ + return self._fields_container + + +class OutputsSegalmanVonMisesEqvFc(_Outputs): + """Intermediate class used to get outputs from + segalman_von_mises_eqv_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.segalman_von_mises_eqv_fc() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(segalman_von_mises_eqv_fc._spec().outputs, op) + self._fields_container = Output( + segalman_von_mises_eqv_fc._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.segalman_von_mises_eqv_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/invariant/von_mises_eqv.py b/ansys/dpf/core/operators/invariant/von_mises_eqv.py index 0b8ae22dace..5987402eaaf 100644 --- a/ansys/dpf/core/operators/invariant/von_mises_eqv.py +++ b/ansys/dpf/core/operators/invariant/von_mises_eqv.py @@ -1,60 +1,94 @@ """ von_mises_eqv -============= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "invariant" category -""" class von_mises_eqv(Operator): """Computes the element-wise Von-Mises criteria on a tensor field. 
- available inputs: - - field (Field, FieldsContainer) + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + - available outputs: - - field (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.invariant.von_mises_eqv() - >>> # Instantiate operator - >>> op = dpf.operators.invariant.von_mises_eqv() + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.invariant.von_mises_eqv( + ... field=my_field, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.invariant.von_mises_eqv(field=my_field) + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, field=None, config=None, server=None): - super().__init__(name="eqv", config = config, server = server) + super().__init__(name="eqv", config=config, server=server) self._inputs = InputsVonMisesEqv(self) self._outputs = OutputsVonMisesEqv(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) @staticmethod def _spec(): - spec = Specification(description="""Computes the element-wise Von-Mises criteria on a tensor field.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = ( + """Computes the element-wise Von-Mises criteria on a tensor field.""" + ) + spec = 
Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "eqv") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="eqv", server=server) @property def inputs(self): @@ -62,93 +96,90 @@ def inputs(self): Returns -------- - inputs : InputsVonMisesEqv + inputs : InputsVonMisesEqv """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsVonMisesEqv + outputs : OutputsVonMisesEqv """ return super().outputs -#internal name: eqv -#scripting name: von_mises_eqv class InputsVonMisesEqv(_Inputs): - """Intermediate class used to connect user inputs to von_mises_eqv operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.invariant.von_mises_eqv() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + """Intermediate class used to connect user inputs to + von_mises_eqv operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.von_mises_eqv() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) """ + def __init__(self, op: Operator): super().__init__(von_mises_eqv._spec().inputs, op) - self._field = Input(von_mises_eqv._spec().input_pin(0), 0, op, -1) + self._field = Input(von_mises_eqv._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.von_mises_eqv() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field + class OutputsVonMisesEqv(_Outputs): - """Intermediate class used to get outputs from von_mises_eqv operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.invariant.von_mises_eqv() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + von_mises_eqv operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.von_mises_eqv() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(von_mises_eqv._spec().outputs, op) - self._field = Output(von_mises_eqv._spec().output_pin(0), 0, op) + self._field = Output(von_mises_eqv._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.von_mises_eqv() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/invariant/von_mises_eqv_fc.py b/ansys/dpf/core/operators/invariant/von_mises_eqv_fc.py index 043dc7d81e9..47357babf9e 100644 --- a/ansys/dpf/core/operators/invariant/von_mises_eqv_fc.py +++ b/ansys/dpf/core/operators/invariant/von_mises_eqv_fc.py @@ -1,60 +1,91 @@ """ von_mises_eqv_fc -================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "invariant" category -""" class von_mises_eqv_fc(Operator): - """Computes the element-wise Von-Mises criteria on all the tensor fields of a fields container. + """Computes the element-wise Von-Mises criteria on all the tensor fields + of a fields container. 
+ + Parameters + ---------- + fields_container : FieldsContainer - available inputs: - - fields_container (FieldsContainer) - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.invariant.von_mises_eqv_fc() - >>> # Instantiate operator - >>> op = dpf.operators.invariant.von_mises_eqv_fc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.invariant.von_mises_eqv_fc( + ... fields_container=my_fields_container, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.invariant.von_mises_eqv_fc(fields_container=my_fields_container) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="eqv_fc", config = config, server = server) + super().__init__(name="eqv_fc", config=config, server=server) self._inputs = InputsVonMisesEqvFc(self) self._outputs = OutputsVonMisesEqvFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Computes the element-wise Von-Mises criteria on all the tensor fields of a fields container.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : 
PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Computes the element-wise Von-Mises criteria on all the tensor fields + of a fields container.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "eqv_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="eqv_fc", server=server) @property def inputs(self): @@ -62,91 +93,87 @@ def inputs(self): Returns -------- - inputs : InputsVonMisesEqvFc + inputs : InputsVonMisesEqvFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsVonMisesEqvFc + outputs : OutputsVonMisesEqvFc """ return super().outputs -#internal name: eqv_fc -#scripting name: von_mises_eqv_fc class InputsVonMisesEqvFc(_Inputs): - """Intermediate class used to connect user inputs to von_mises_eqv_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.invariant.von_mises_eqv_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + von_mises_eqv_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.von_mises_eqv_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(von_mises_eqv_fc._spec().inputs, op) - self._fields_container = Input(von_mises_eqv_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(von_mises_eqv_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.von_mises_eqv_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsVonMisesEqvFc(_Outputs): - """Intermediate class used to get outputs from von_mises_eqv_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.invariant.von_mises_eqv_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + von_mises_eqv_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.invariant.von_mises_eqv_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(von_mises_eqv_fc._spec().outputs, op) - self._fields_container = Output(von_mises_eqv_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(von_mises_eqv_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.invariant.von_mises_eqv_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/logic/__init__.py b/ansys/dpf/core/operators/logic/__init__.py index 4618bddf28c..7562d437032 100644 --- a/ansys/dpf/core/operators/logic/__init__.py +++ b/ansys/dpf/core/operators/logic/__init__.py @@ -2,7 +2,6 @@ from .component_selector_fc import component_selector_fc from .component_selector import component_selector from .identical_property_fields import identical_property_fields -from .merge_fields_by_label import merge_fields_by_label from .solid_shell_fields import solid_shell_fields from .identical_fields import identical_fields from .included_fields import included_fields diff --git a/ansys/dpf/core/operators/logic/component_selector.py b/ansys/dpf/core/operators/logic/component_selector.py index fe8508ffdfa..f2500525dc3 100644 --- a/ansys/dpf/core/operators/logic/component_selector.py +++ b/ansys/dpf/core/operators/logic/component_selector.py @@ -1,72 +1,128 @@ """ component_selector -================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "logic" category -""" class component_selector(Operator): """Create a scalar/vector field based on the selected component. 
- available inputs: - - field (Field, FieldsContainer) - - component_number (int, list) - - default_value (float) (optional) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.logic.component_selector() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_component_number = int() - >>> op.inputs.component_number.connect(my_component_number) - >>> my_default_value = float() - >>> op.inputs.default_value.connect(my_default_value) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.logic.component_selector(field=my_field,component_number=my_component_number,default_value=my_default_value) + Parameters + ---------- + field : Field or FieldsContainer + component_number : int + One or several component index that will be + extracted from the initial field. + default_value : float, optional + Set a default value for components that do + not exist + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.logic.component_selector() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_component_number = int() + >>> op.inputs.component_number.connect(my_component_number) + >>> my_default_value = float() + >>> op.inputs.default_value.connect(my_default_value) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.logic.component_selector( + ... field=my_field, + ... component_number=my_component_number, + ... default_value=my_default_value, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" - def __init__(self, field=None, component_number=None, default_value=None, config=None, server=None): - super().__init__(name="component_selector", config = config, server = server) + def __init__( + self, + field=None, + component_number=None, + default_value=None, + config=None, + server=None, + ): + super().__init__(name="component_selector", config=config, server=server) self._inputs = InputsComponentSelector(self) self._outputs = OutputsComponentSelector(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if component_number !=None: + if component_number is not None: self.inputs.component_number.connect(component_number) - if default_value !=None: + if default_value is not None: self.inputs.default_value.connect(default_value) @staticmethod def _spec(): - spec = Specification(description="""Create a scalar/vector field based on the selected component.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "component_number", type_names=["int32","vector"], optional=False, document="""one or several component index that will be extracted from the initial field."""), - 2 : PinSpecification(name = "default_value", type_names=["double"], optional=True, document="""set a default value for components that do not exist""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = ( + """Create a scalar/vector field based on the selected component.""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="component_number", + 
type_names=["int32", "vector"], + optional=False, + document="""One or several component index that will be + extracted from the initial field.""", + ), + 2: PinSpecification( + name="default_value", + type_names=["double"], + optional=True, + document="""Set a default value for components that do + not exist""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "component_selector") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="component_selector", server=server) @property def inputs(self): @@ -74,143 +130,139 @@ def inputs(self): Returns -------- - inputs : InputsComponentSelector + inputs : InputsComponentSelector """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsComponentSelector + outputs : OutputsComponentSelector """ return super().outputs -#internal name: component_selector -#scripting name: component_selector class InputsComponentSelector(_Inputs): - """Intermediate class used to connect user inputs to component_selector operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.component_selector() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_component_number = int() - >>> op.inputs.component_number.connect(my_component_number) - >>> my_default_value = float() - >>> op.inputs.default_value.connect(my_default_value) + """Intermediate class used to connect user inputs to + component_selector operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.logic.component_selector() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_component_number = int() + >>> op.inputs.component_number.connect(my_component_number) + >>> my_default_value = float() + >>> op.inputs.default_value.connect(my_default_value) """ + def __init__(self, op: Operator): super().__init__(component_selector._spec().inputs, op) - self._field = Input(component_selector._spec().input_pin(0), 0, op, -1) + self._field = Input(component_selector._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._component_number = Input(component_selector._spec().input_pin(1), 1, op, -1) + self._component_number = Input( + component_selector._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._component_number) - self._default_value = Input(component_selector._spec().input_pin(2), 2, op, -1) + self._default_value = Input(component_selector._spec().input_pin(2), 2, op, -1) self._inputs.append(self._default_value) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.component_selector() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def component_number(self): - """Allows to connect component_number input to the operator + """Allows to connect component_number input to the operator. - - pindoc: one or several component index that will be extracted from the initial field. + One or several component index that will be + extracted from the initial field. 
Parameters ---------- - my_component_number : int, list, + my_component_number : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.component_selector() >>> op.inputs.component_number.connect(my_component_number) - >>> #or + >>> # or >>> op.inputs.component_number(my_component_number) - """ return self._component_number @property def default_value(self): - """Allows to connect default_value input to the operator + """Allows to connect default_value input to the operator. - - pindoc: set a default value for components that do not exist + Set a default value for components that do + not exist Parameters ---------- - my_default_value : float, + my_default_value : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.component_selector() >>> op.inputs.default_value.connect(my_default_value) - >>> #or + >>> # or >>> op.inputs.default_value(my_default_value) - """ return self._default_value + class OutputsComponentSelector(_Outputs): - """Intermediate class used to get outputs from component_selector operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.component_selector() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + component_selector operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.logic.component_selector() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(component_selector._spec().outputs, op) - self._field = Output(component_selector._spec().output_pin(0), 0, op) + self._field = Output(component_selector._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.component_selector() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/logic/component_selector_fc.py b/ansys/dpf/core/operators/logic/component_selector_fc.py index a2f3a4a2c12..426bf27e153 100644 --- a/ansys/dpf/core/operators/logic/component_selector_fc.py +++ b/ansys/dpf/core/operators/logic/component_selector_fc.py @@ -1,66 +1,108 @@ """ component_selector_fc -===================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "logic" category -""" class component_selector_fc(Operator): - """Create a scalar fields container based on the selected component for each field. 
- - available inputs: - - fields_container (FieldsContainer) - - component_number (int, list) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.logic.component_selector_fc() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_component_number = int() - >>> op.inputs.component_number.connect(my_component_number) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.logic.component_selector_fc(fields_container=my_fields_container,component_number=my_component_number) + """Create a scalar fields container based on the selected component for + each field. + + Parameters + ---------- + fields_container : FieldsContainer or Field + component_number : int + One or several component index that will be + extracted from the initial field. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.logic.component_selector_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_component_number = int() + >>> op.inputs.component_number.connect(my_component_number) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.logic.component_selector_fc( + ... fields_container=my_fields_container, + ... component_number=my_component_number, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, component_number=None, config=None, server=None): - super().__init__(name="component_selector_fc", config = config, server = server) + def __init__( + self, fields_container=None, component_number=None, config=None, server=None + ): + super().__init__(name="component_selector_fc", config=config, server=server) self._inputs = InputsComponentSelectorFc(self) self._outputs = OutputsComponentSelectorFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if component_number !=None: + if component_number is not None: self.inputs.component_number.connect(component_number) @staticmethod def _spec(): - spec = Specification(description="""Create a scalar fields container based on the selected component for each field.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "component_number", type_names=["int32","vector"], optional=False, document="""one or several component index that will be extracted from the initial field.""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Create a scalar fields container based on the selected component for + each field.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container", "field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="component_number", + type_names=["int32", "vector"], + optional=False, + document="""One or several component index that will be + extracted from the 
initial field.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "component_selector_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="component_selector_fc", server=server) @property def inputs(self): @@ -68,117 +110,118 @@ def inputs(self): Returns -------- - inputs : InputsComponentSelectorFc + inputs : InputsComponentSelectorFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsComponentSelectorFc + outputs : OutputsComponentSelectorFc """ return super().outputs -#internal name: component_selector_fc -#scripting name: component_selector_fc class InputsComponentSelectorFc(_Inputs): - """Intermediate class used to connect user inputs to component_selector_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.component_selector_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_component_number = int() - >>> op.inputs.component_number.connect(my_component_number) + """Intermediate class used to connect user inputs to + component_selector_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.logic.component_selector_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_component_number = int() + >>> op.inputs.component_number.connect(my_component_number) """ + def __init__(self, op: Operator): super().__init__(component_selector_fc._spec().inputs, op) - self._fields_container = Input(component_selector_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + component_selector_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._component_number = Input(component_selector_fc._spec().input_pin(1), 1, op, -1) + self._component_number = Input( + component_selector_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._component_number) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.component_selector_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def component_number(self): - """Allows to connect component_number input to the operator + """Allows to connect component_number input to the operator. - - pindoc: one or several component index that will be extracted from the initial field. + One or several component index that will be + extracted from the initial field. 
Parameters ---------- - my_component_number : int, list, + my_component_number : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.component_selector_fc() >>> op.inputs.component_number.connect(my_component_number) - >>> #or + >>> # or >>> op.inputs.component_number(my_component_number) - """ return self._component_number + class OutputsComponentSelectorFc(_Outputs): - """Intermediate class used to get outputs from component_selector_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.component_selector_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + component_selector_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.logic.component_selector_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(component_selector_fc._spec().outputs, op) - self._fields_container = Output(component_selector_fc._spec().output_pin(0), 0, op) + self._fields_container = Output( + component_selector_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.component_selector_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/logic/enrich_materials.py b/ansys/dpf/core/operators/logic/enrich_materials.py index 012be285917..d57aa022601 100644 --- a/ansys/dpf/core/operators/logic/enrich_materials.py +++ b/ansys/dpf/core/operators/logic/enrich_materials.py @@ -1,68 +1,122 @@ """ enrich_materials -================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "logic" category -""" class enrich_materials(Operator): - """Take a MaterialContainer and a stream and enrich the MaterialContainer using stream data. - - available inputs: - - MaterialContainer (Any) - - streams (StreamsContainer, FieldsContainer) - - streams_mapping () - - available outputs: - - MaterialContainer (bool) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.logic.enrich_materials() - - >>> # Make input connections - >>> my_MaterialContainer = dpf.Any() - >>> op.inputs.MaterialContainer.connect(my_MaterialContainer) - >>> my_streams = dpf.StreamsContainer() - >>> op.inputs.streams.connect(my_streams) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.logic.enrich_materials(MaterialContainer=my_MaterialContainer,streams=my_streams) + """Take a MaterialContainer and a stream and enrich the MaterialContainer + using stream data. 
+ + Parameters + ---------- + MaterialContainer : + streams : StreamsContainer or FieldsContainer + streams_mapping : Class Dataprocessing::Cpropertyfieldscontainer + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.logic.enrich_materials() + + >>> # Make input connections + >>> my_MaterialContainer = dpf.() + >>> op.inputs.MaterialContainer.connect(my_MaterialContainer) + >>> my_streams = dpf.StreamsContainer() + >>> op.inputs.streams.connect(my_streams) + >>> my_streams_mapping = dpf.Class Dataprocessing::Cpropertyfieldscontainer() + >>> op.inputs.streams_mapping.connect(my_streams_mapping) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.logic.enrich_materials( + ... MaterialContainer=my_MaterialContainer, + ... streams=my_streams, + ... streams_mapping=my_streams_mapping, + ... ) + + >>> # Get output data + >>> result_MaterialContainer = op.outputs.MaterialContainer() + """ - >>> # Get output data - >>> result_MaterialContainer = op.outputs.MaterialContainer()""" - def __init__(self, MaterialContainer=None, streams=None, config=None, server=None): - super().__init__(name="enrich_materials", config = config, server = server) + def __init__( + self, + MaterialContainer=None, + streams=None, + streams_mapping=None, + config=None, + server=None, + ): + super().__init__(name="enrich_materials", config=config, server=server) self._inputs = InputsEnrichMaterials(self) self._outputs = OutputsEnrichMaterials(self) - if MaterialContainer !=None: + if MaterialContainer is not None: self.inputs.MaterialContainer.connect(MaterialContainer) - if streams !=None: + if streams is not None: self.inputs.streams.connect(streams) + if streams_mapping is not None: + self.inputs.streams_mapping.connect(streams_mapping) @staticmethod def _spec(): - spec = Specification(description="""Take a MaterialContainer and a stream and enrich the MaterialContainer using stream data.""", - 
map_input_pin_spec={ - 0 : PinSpecification(name = "MaterialContainer", type_names=["any"], optional=False, document=""""""), - 1 : PinSpecification(name = "streams", type_names=["streams_container","fields_container"], optional=False, document=""""""), - 2 : PinSpecification(name = "streams_mapping", type_names=[], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "MaterialContainer", type_names=["bool"], optional=False, document="""""")}) + description = """Take a MaterialContainer and a stream and enrich the MaterialContainer + using stream data.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="MaterialContainer", + type_names=["any"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="streams", + type_names=["streams_container", "fields_container"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="streams_mapping", + type_names=["class dataProcessing::CPropertyFieldsContainer"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="MaterialContainer", + type_names=["bool"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "enrich_materials") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="enrich_materials", server=server) @property def inputs(self): @@ -70,115 +124,133 @@ def inputs(self): Returns -------- - inputs : InputsEnrichMaterials + inputs : InputsEnrichMaterials """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsEnrichMaterials + outputs : OutputsEnrichMaterials """ return super().outputs -#internal name: enrich_materials -#scripting name: enrich_materials class InputsEnrichMaterials(_Inputs): - """Intermediate class used to connect user inputs to enrich_materials operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.enrich_materials() - >>> my_MaterialContainer = dpf.Any() - >>> op.inputs.MaterialContainer.connect(my_MaterialContainer) - >>> my_streams = dpf.StreamsContainer() - >>> op.inputs.streams.connect(my_streams) + """Intermediate class used to connect user inputs to + enrich_materials operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.logic.enrich_materials() + >>> my_MaterialContainer = dpf.() + >>> op.inputs.MaterialContainer.connect(my_MaterialContainer) + >>> my_streams = dpf.StreamsContainer() + >>> op.inputs.streams.connect(my_streams) + >>> my_streams_mapping = dpf.Class Dataprocessing::Cpropertyfieldscontainer() + >>> op.inputs.streams_mapping.connect(my_streams_mapping) """ + def __init__(self, op: Operator): super().__init__(enrich_materials._spec().inputs, op) - self._MaterialContainer = Input(enrich_materials._spec().input_pin(0), 0, op, -1) + self._MaterialContainer = Input( + enrich_materials._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._MaterialContainer) - self._streams = Input(enrich_materials._spec().input_pin(1), 1, op, -1) + self._streams = Input(enrich_materials._spec().input_pin(1), 1, op, -1) self._inputs.append(self._streams) + self._streams_mapping = Input(enrich_materials._spec().input_pin(2), 2, op, -1) + self._inputs.append(self._streams_mapping) @property def MaterialContainer(self): - """Allows to connect MaterialContainer input to the operator + """Allows to connect MaterialContainer input to the operator. Parameters ---------- - my_MaterialContainer : Any, + my_MaterialContainer : Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.enrich_materials() >>> op.inputs.MaterialContainer.connect(my_MaterialContainer) - >>> #or + >>> # or >>> op.inputs.MaterialContainer(my_MaterialContainer) - """ return self._MaterialContainer @property def streams(self): - """Allows to connect streams input to the operator + """Allows to connect streams input to the operator. 
Parameters ---------- - my_streams : StreamsContainer, FieldsContainer, + my_streams : StreamsContainer or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.enrich_materials() >>> op.inputs.streams.connect(my_streams) - >>> #or + >>> # or >>> op.inputs.streams(my_streams) - """ return self._streams + @property + def streams_mapping(self): + """Allows to connect streams_mapping input to the operator. + + Parameters + ---------- + my_streams_mapping : Class Dataprocessing::Cpropertyfieldscontainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.logic.enrich_materials() + >>> op.inputs.streams_mapping.connect(my_streams_mapping) + >>> # or + >>> op.inputs.streams_mapping(my_streams_mapping) + """ + return self._streams_mapping + + class OutputsEnrichMaterials(_Outputs): - """Intermediate class used to get outputs from enrich_materials operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.enrich_materials() - >>> # Connect inputs : op.inputs. ... - >>> result_MaterialContainer = op.outputs.MaterialContainer() + """Intermediate class used to get outputs from + enrich_materials operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.logic.enrich_materials() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_MaterialContainer = op.outputs.MaterialContainer() """ + def __init__(self, op: Operator): super().__init__(enrich_materials._spec().outputs, op) - self._MaterialContainer = Output(enrich_materials._spec().output_pin(0), 0, op) + self._MaterialContainer = Output(enrich_materials._spec().output_pin(0), 0, op) self._outputs.append(self._MaterialContainer) @property def MaterialContainer(self): """Allows to get MaterialContainer output of the operator - Returns ---------- - my_MaterialContainer : bool, + my_MaterialContainer : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.enrich_materials() >>> # Connect inputs : op.inputs. ... - >>> result_MaterialContainer = op.outputs.MaterialContainer() - """ + >>> result_MaterialContainer = op.outputs.MaterialContainer() + """ # noqa: E501 return self._MaterialContainer - diff --git a/ansys/dpf/core/operators/logic/identical_fc.py b/ansys/dpf/core/operators/logic/identical_fc.py index 046e66c9e8d..75e046f8436 100644 --- a/ansys/dpf/core/operators/logic/identical_fc.py +++ b/ansys/dpf/core/operators/logic/identical_fc.py @@ -1,81 +1,160 @@ """ identical_fc -============ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "logic" category -""" class identical_fc(Operator): """Check if two fields container are identical. 
- available inputs: - - fields_containerA (FieldsContainer) - - fields_containerB (FieldsContainer) - - tolerance (float) - - small_value (float) - - available outputs: - - boolean (bool) - - message (str) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.logic.identical_fc() - - >>> # Make input connections - >>> my_fields_containerA = dpf.FieldsContainer() - >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> my_fields_containerB = dpf.FieldsContainer() - >>> op.inputs.fields_containerB.connect(my_fields_containerB) - >>> my_tolerance = float() - >>> op.inputs.tolerance.connect(my_tolerance) - >>> my_small_value = float() - >>> op.inputs.small_value.connect(my_small_value) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.logic.identical_fc(fields_containerA=my_fields_containerA,fields_containerB=my_fields_containerB,tolerance=my_tolerance,small_value=my_small_value) - - >>> # Get output data - >>> result_boolean = op.outputs.boolean() - >>> result_message = op.outputs.message()""" - def __init__(self, fields_containerA=None, fields_containerB=None, tolerance=None, small_value=None, config=None, server=None): - super().__init__(name="AreFieldsIdentical_fc", config = config, server = server) + Parameters + ---------- + fields_containerA : FieldsContainer + fields_containerB : FieldsContainer + small_value : float, optional + Double positive small value.smallest value + which will be considered during the + comparison step : all the abs(values) + in field less than this value is + considered as null, (default + value:1.0e-14). + tolerance : float, optional + Double relative tolerance. maximum tolerance + gap between to compared values: + values within relative tolerance are + considered identical (v1-v2)/v2 < + relativetol (default is 0.001). 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.logic.identical_fc() + + >>> # Make input connections + >>> my_fields_containerA = dpf.FieldsContainer() + >>> op.inputs.fields_containerA.connect(my_fields_containerA) + >>> my_fields_containerB = dpf.FieldsContainer() + >>> op.inputs.fields_containerB.connect(my_fields_containerB) + >>> my_small_value = float() + >>> op.inputs.small_value.connect(my_small_value) + >>> my_tolerance = float() + >>> op.inputs.tolerance.connect(my_tolerance) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.logic.identical_fc( + ... fields_containerA=my_fields_containerA, + ... fields_containerB=my_fields_containerB, + ... small_value=my_small_value, + ... tolerance=my_tolerance, + ... ) + + >>> # Get output data + >>> result_boolean = op.outputs.boolean() + >>> result_message = op.outputs.message() + """ + + def __init__( + self, + fields_containerA=None, + fields_containerB=None, + small_value=None, + tolerance=None, + config=None, + server=None, + ): + super().__init__(name="AreFieldsIdentical_fc", config=config, server=server) self._inputs = InputsIdenticalFc(self) self._outputs = OutputsIdenticalFc(self) - if fields_containerA !=None: + if fields_containerA is not None: self.inputs.fields_containerA.connect(fields_containerA) - if fields_containerB !=None: + if fields_containerB is not None: self.inputs.fields_containerB.connect(fields_containerB) - if tolerance !=None: - self.inputs.tolerance.connect(tolerance) - if small_value !=None: + if small_value is not None: self.inputs.small_value.connect(small_value) + if tolerance is not None: + self.inputs.tolerance.connect(tolerance) @staticmethod def _spec(): - spec = Specification(description="""Check if two fields container are identical.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_containerA", type_names=["fields_container"], optional=False, 
document=""""""), - 1 : PinSpecification(name = "fields_containerB", type_names=["fields_container"], optional=False, document=""""""), - 2 : PinSpecification(name = "tolerance", type_names=["double"], optional=False, document="""Double relative tolerance. Maximum tolerance gap between to compared values: values within relative tolerance are considered identical (v1-v2)/v2 < relativeTol (default is 0.001)."""), - 3 : PinSpecification(name = "small_value", type_names=["double"], optional=False, document="""Double positive small value.Smallest value which will be considered during the comparison step : all the abs(values) in field less than this value is considered as null, (default value:1.0e-14).""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "boolean", type_names=["bool"], optional=False, document="""bool (true if identical...)"""), - 1 : PinSpecification(name = "message", type_names=["string"], optional=False, document="""""")}) + description = """Check if two fields container are identical.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_containerA", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="fields_containerB", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="small_value", + type_names=["double"], + optional=True, + document="""Double positive small value.smallest value + which will be considered during the + comparison step : all the abs(values) + in field less than this value is + considered as null, (default + value:1.0e-14).""", + ), + 3: PinSpecification( + name="tolerance", + type_names=["double"], + optional=True, + document="""Double relative tolerance. 
maximum tolerance + gap between to compared values: + values within relative tolerance are + considered identical (v1-v2)/v2 < + relativetol (default is 0.001).""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="boolean", + type_names=["bool"], + optional=False, + document="""Bool (true if identical...)""", + ), + 1: PinSpecification( + name="message", + type_names=["string"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "AreFieldsIdentical_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="AreFieldsIdentical_fc", server=server) @property def inputs(self): @@ -83,191 +162,186 @@ def inputs(self): Returns -------- - inputs : InputsIdenticalFc + inputs : InputsIdenticalFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsIdenticalFc + outputs : OutputsIdenticalFc """ return super().outputs -#internal name: AreFieldsIdentical_fc -#scripting name: identical_fc class InputsIdenticalFc(_Inputs): - """Intermediate class used to connect user inputs to identical_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.identical_fc() - >>> my_fields_containerA = dpf.FieldsContainer() - >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> my_fields_containerB = dpf.FieldsContainer() - >>> op.inputs.fields_containerB.connect(my_fields_containerB) - >>> my_tolerance = float() - >>> op.inputs.tolerance.connect(my_tolerance) - >>> my_small_value = float() - >>> op.inputs.small_value.connect(my_small_value) + """Intermediate class used to connect user inputs to + identical_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.logic.identical_fc() + >>> my_fields_containerA = dpf.FieldsContainer() + >>> op.inputs.fields_containerA.connect(my_fields_containerA) + >>> my_fields_containerB = dpf.FieldsContainer() + >>> op.inputs.fields_containerB.connect(my_fields_containerB) + >>> my_small_value = float() + >>> op.inputs.small_value.connect(my_small_value) + >>> my_tolerance = float() + >>> op.inputs.tolerance.connect(my_tolerance) """ + def __init__(self, op: Operator): super().__init__(identical_fc._spec().inputs, op) - self._fields_containerA = Input(identical_fc._spec().input_pin(0), 0, op, -1) + self._fields_containerA = Input(identical_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_containerA) - self._fields_containerB = Input(identical_fc._spec().input_pin(1), 1, op, -1) + self._fields_containerB = Input(identical_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._fields_containerB) - self._tolerance = Input(identical_fc._spec().input_pin(2), 2, op, -1) - self._inputs.append(self._tolerance) - self._small_value = Input(identical_fc._spec().input_pin(3), 3, op, -1) + self._small_value = Input(identical_fc._spec().input_pin(2), 2, op, -1) self._inputs.append(self._small_value) + self._tolerance = Input(identical_fc._spec().input_pin(3), 3, op, -1) + self._inputs.append(self._tolerance) @property def fields_containerA(self): - """Allows to connect fields_containerA input to the operator + """Allows to connect fields_containerA input to the operator. 
Parameters ---------- - my_fields_containerA : FieldsContainer, + my_fields_containerA : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.identical_fc() >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> #or + >>> # or >>> op.inputs.fields_containerA(my_fields_containerA) - """ return self._fields_containerA @property def fields_containerB(self): - """Allows to connect fields_containerB input to the operator + """Allows to connect fields_containerB input to the operator. Parameters ---------- - my_fields_containerB : FieldsContainer, + my_fields_containerB : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.identical_fc() >>> op.inputs.fields_containerB.connect(my_fields_containerB) - >>> #or + >>> # or >>> op.inputs.fields_containerB(my_fields_containerB) - """ return self._fields_containerB @property - def tolerance(self): - """Allows to connect tolerance input to the operator + def small_value(self): + """Allows to connect small_value input to the operator. - - pindoc: Double relative tolerance. Maximum tolerance gap between to compared values: values within relative tolerance are considered identical (v1-v2)/v2 < relativeTol (default is 0.001). + Double positive small value.smallest value + which will be considered during the + comparison step : all the abs(values) + in field less than this value is + considered as null, (default + value:1.0e-14). 
Parameters ---------- - my_tolerance : float, + my_small_value : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.identical_fc() - >>> op.inputs.tolerance.connect(my_tolerance) - >>> #or - >>> op.inputs.tolerance(my_tolerance) - + >>> op.inputs.small_value.connect(my_small_value) + >>> # or + >>> op.inputs.small_value(my_small_value) """ - return self._tolerance + return self._small_value @property - def small_value(self): - """Allows to connect small_value input to the operator + def tolerance(self): + """Allows to connect tolerance input to the operator. - - pindoc: Double positive small value.Smallest value which will be considered during the comparison step : all the abs(values) in field less than this value is considered as null, (default value:1.0e-14). + Double relative tolerance. maximum tolerance + gap between to compared values: + values within relative tolerance are + considered identical (v1-v2)/v2 < + relativetol (default is 0.001). Parameters ---------- - my_small_value : float, + my_tolerance : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.identical_fc() - >>> op.inputs.small_value.connect(my_small_value) - >>> #or - >>> op.inputs.small_value(my_small_value) - + >>> op.inputs.tolerance.connect(my_tolerance) + >>> # or + >>> op.inputs.tolerance(my_tolerance) """ - return self._small_value + return self._tolerance + class OutputsIdenticalFc(_Outputs): - """Intermediate class used to get outputs from identical_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.identical_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_boolean = op.outputs.boolean() - >>> result_message = op.outputs.message() + """Intermediate class used to get outputs from + identical_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.logic.identical_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_boolean = op.outputs.boolean() + >>> result_message = op.outputs.message() """ + def __init__(self, op: Operator): super().__init__(identical_fc._spec().outputs, op) - self._boolean = Output(identical_fc._spec().output_pin(0), 0, op) + self._boolean = Output(identical_fc._spec().output_pin(0), 0, op) self._outputs.append(self._boolean) - self._message = Output(identical_fc._spec().output_pin(1), 1, op) + self._message = Output(identical_fc._spec().output_pin(1), 1, op) self._outputs.append(self._message) @property def boolean(self): """Allows to get boolean output of the operator - - - pindoc: bool (true if identical...) - Returns ---------- - my_boolean : bool, + my_boolean : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.identical_fc() >>> # Connect inputs : op.inputs. ... - >>> result_boolean = op.outputs.boolean() - """ + >>> result_boolean = op.outputs.boolean() + """ # noqa: E501 return self._boolean @property def message(self): """Allows to get message output of the operator - Returns ---------- - my_message : str, + my_message : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.identical_fc() >>> # Connect inputs : op.inputs. ... - >>> result_message = op.outputs.message() - """ + >>> result_message = op.outputs.message() + """ # noqa: E501 return self._message - diff --git a/ansys/dpf/core/operators/logic/identical_fields.py b/ansys/dpf/core/operators/logic/identical_fields.py index b385300fccb..ede80e67efe 100644 --- a/ansys/dpf/core/operators/logic/identical_fields.py +++ b/ansys/dpf/core/operators/logic/identical_fields.py @@ -1,81 +1,160 @@ """ identical_fields -================ +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "logic" category -""" class identical_fields(Operator): """Check if two fields are identical. - available inputs: - - fieldA (Field) - - fieldB (Field) - - double_value (float) (optional) - - double_tolerance (float) (optional) - - available outputs: - - boolean (bool) - - message (str) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.logic.identical_fields() - - >>> # Make input connections - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) - >>> my_double_value = float() - >>> op.inputs.double_value.connect(my_double_value) - >>> my_double_tolerance = float() - >>> op.inputs.double_tolerance.connect(my_double_tolerance) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.logic.identical_fields(fieldA=my_fieldA,fieldB=my_fieldB,double_value=my_double_value,double_tolerance=my_double_tolerance) - - >>> # Get output data - >>> result_boolean = op.outputs.boolean() - >>> result_message = op.outputs.message()""" - def __init__(self, fieldA=None, fieldB=None, double_value=None, double_tolerance=None, config=None, server=None): - super().__init__(name="AreFieldsIdentical", config = config, server = server) + Parameters + ---------- + fieldA : Field + fieldB : Field + double_value : float, optional + Double positive small value. 
smallest value + which will be considered during the + comparison step: all the abs(values) + in field less than this value is + considered as null, (default + value:1.0e-14). + double_tolerance : float, optional + Double relative tolerance.maximum tolerance + gap between to compared values : + values within relative tolerance are + considered identical(v1 - v2) / v2 < + relativetol(default is 0.001). + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.logic.identical_fields() + + >>> # Make input connections + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) + >>> my_double_value = float() + >>> op.inputs.double_value.connect(my_double_value) + >>> my_double_tolerance = float() + >>> op.inputs.double_tolerance.connect(my_double_tolerance) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.logic.identical_fields( + ... fieldA=my_fieldA, + ... fieldB=my_fieldB, + ... double_value=my_double_value, + ... double_tolerance=my_double_tolerance, + ... 
) + + >>> # Get output data + >>> result_boolean = op.outputs.boolean() + >>> result_message = op.outputs.message() + """ + + def __init__( + self, + fieldA=None, + fieldB=None, + double_value=None, + double_tolerance=None, + config=None, + server=None, + ): + super().__init__(name="AreFieldsIdentical", config=config, server=server) self._inputs = InputsIdenticalFields(self) self._outputs = OutputsIdenticalFields(self) - if fieldA !=None: + if fieldA is not None: self.inputs.fieldA.connect(fieldA) - if fieldB !=None: + if fieldB is not None: self.inputs.fieldB.connect(fieldB) - if double_value !=None: + if double_value is not None: self.inputs.double_value.connect(double_value) - if double_tolerance !=None: + if double_tolerance is not None: self.inputs.double_tolerance.connect(double_tolerance) @staticmethod def _spec(): - spec = Specification(description="""Check if two fields are identical.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fieldA", type_names=["field"], optional=False, document=""""""), - 1 : PinSpecification(name = "fieldB", type_names=["field"], optional=False, document=""""""), - 2 : PinSpecification(name = "double_value", type_names=["double"], optional=True, document="""Double positive small value. 
Smallest value which will be considered during the comparison step: all the abs(values) in field less than this value is considered as null, (default value:1.0e-14)."""), - 3 : PinSpecification(name = "double_tolerance", type_names=["double"], optional=True, document="""Double relative tolerance.Maximum tolerance gap between to compared values : values within relative tolerance are considered identical(v1 - v2) / v2 < relativeTol(default is 0.001).""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "boolean", type_names=["bool"], optional=False, document="""bool (true if identical...)"""), - 1 : PinSpecification(name = "message", type_names=["string"], optional=False, document="""""")}) + description = """Check if two fields are identical.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fieldA", + type_names=["field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="fieldB", + type_names=["field"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="double_value", + type_names=["double"], + optional=True, + document="""Double positive small value. 
smallest value + which will be considered during the + comparison step: all the abs(values) + in field less than this value is + considered as null, (default + value:1.0e-14).""", + ), + 3: PinSpecification( + name="double_tolerance", + type_names=["double"], + optional=True, + document="""Double relative tolerance.maximum tolerance + gap between to compared values : + values within relative tolerance are + considered identical(v1 - v2) / v2 < + relativetol(default is 0.001).""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="boolean", + type_names=["bool"], + optional=False, + document="""Bool (true if identical...)""", + ), + 1: PinSpecification( + name="message", + type_names=["string"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "AreFieldsIdentical") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="AreFieldsIdentical", server=server) @property def inputs(self): @@ -83,191 +162,186 @@ def inputs(self): Returns -------- - inputs : InputsIdenticalFields + inputs : InputsIdenticalFields """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsIdenticalFields + outputs : OutputsIdenticalFields """ return super().outputs -#internal name: AreFieldsIdentical -#scripting name: identical_fields class InputsIdenticalFields(_Inputs): - """Intermediate class used to connect user inputs to identical_fields operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.identical_fields() - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) - >>> my_double_value = float() - >>> op.inputs.double_value.connect(my_double_value) - >>> my_double_tolerance = float() - >>> op.inputs.double_tolerance.connect(my_double_tolerance) + """Intermediate class used to connect user inputs to + identical_fields operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.logic.identical_fields() + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) + >>> my_double_value = float() + >>> op.inputs.double_value.connect(my_double_value) + >>> my_double_tolerance = float() + >>> op.inputs.double_tolerance.connect(my_double_tolerance) """ + def __init__(self, op: Operator): super().__init__(identical_fields._spec().inputs, op) - self._fieldA = Input(identical_fields._spec().input_pin(0), 0, op, -1) + self._fieldA = Input(identical_fields._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fieldA) - self._fieldB = Input(identical_fields._spec().input_pin(1), 1, op, -1) + self._fieldB = Input(identical_fields._spec().input_pin(1), 1, op, -1) self._inputs.append(self._fieldB) - self._double_value = Input(identical_fields._spec().input_pin(2), 2, op, -1) + self._double_value = Input(identical_fields._spec().input_pin(2), 2, op, -1) self._inputs.append(self._double_value) - self._double_tolerance = Input(identical_fields._spec().input_pin(3), 3, op, -1) + self._double_tolerance = Input(identical_fields._spec().input_pin(3), 3, op, -1) self._inputs.append(self._double_tolerance) @property def fieldA(self): - """Allows to connect fieldA input to the operator + """Allows to connect fieldA input to the operator. Parameters ---------- - my_fieldA : Field, + my_fieldA : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.identical_fields() >>> op.inputs.fieldA.connect(my_fieldA) - >>> #or + >>> # or >>> op.inputs.fieldA(my_fieldA) - """ return self._fieldA @property def fieldB(self): - """Allows to connect fieldB input to the operator + """Allows to connect fieldB input to the operator. 
Parameters ---------- - my_fieldB : Field, + my_fieldB : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.identical_fields() >>> op.inputs.fieldB.connect(my_fieldB) - >>> #or + >>> # or >>> op.inputs.fieldB(my_fieldB) - """ return self._fieldB @property def double_value(self): - """Allows to connect double_value input to the operator + """Allows to connect double_value input to the operator. - - pindoc: Double positive small value. Smallest value which will be considered during the comparison step: all the abs(values) in field less than this value is considered as null, (default value:1.0e-14). + Double positive small value. smallest value + which will be considered during the + comparison step: all the abs(values) + in field less than this value is + considered as null, (default + value:1.0e-14). Parameters ---------- - my_double_value : float, + my_double_value : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.identical_fields() >>> op.inputs.double_value.connect(my_double_value) - >>> #or + >>> # or >>> op.inputs.double_value(my_double_value) - """ return self._double_value @property def double_tolerance(self): - """Allows to connect double_tolerance input to the operator + """Allows to connect double_tolerance input to the operator. - - pindoc: Double relative tolerance.Maximum tolerance gap between to compared values : values within relative tolerance are considered identical(v1 - v2) / v2 < relativeTol(default is 0.001). + Double relative tolerance.maximum tolerance + gap between to compared values : + values within relative tolerance are + considered identical(v1 - v2) / v2 < + relativetol(default is 0.001). 
Parameters ---------- - my_double_tolerance : float, + my_double_tolerance : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.identical_fields() >>> op.inputs.double_tolerance.connect(my_double_tolerance) - >>> #or + >>> # or >>> op.inputs.double_tolerance(my_double_tolerance) - """ return self._double_tolerance + class OutputsIdenticalFields(_Outputs): - """Intermediate class used to get outputs from identical_fields operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.identical_fields() - >>> # Connect inputs : op.inputs. ... - >>> result_boolean = op.outputs.boolean() - >>> result_message = op.outputs.message() + """Intermediate class used to get outputs from + identical_fields operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.logic.identical_fields() + >>> # Connect inputs : op.inputs. ... + >>> result_boolean = op.outputs.boolean() + >>> result_message = op.outputs.message() """ + def __init__(self, op: Operator): super().__init__(identical_fields._spec().outputs, op) - self._boolean = Output(identical_fields._spec().output_pin(0), 0, op) + self._boolean = Output(identical_fields._spec().output_pin(0), 0, op) self._outputs.append(self._boolean) - self._message = Output(identical_fields._spec().output_pin(1), 1, op) + self._message = Output(identical_fields._spec().output_pin(1), 1, op) self._outputs.append(self._message) @property def boolean(self): """Allows to get boolean output of the operator - - - pindoc: bool (true if identical...) - Returns ---------- - my_boolean : bool, + my_boolean : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.identical_fields() >>> # Connect inputs : op.inputs. ... 
- >>> result_boolean = op.outputs.boolean() - """ + >>> result_boolean = op.outputs.boolean() + """ # noqa: E501 return self._boolean @property def message(self): """Allows to get message output of the operator - Returns ---------- - my_message : str, + my_message : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.identical_fields() >>> # Connect inputs : op.inputs. ... - >>> result_message = op.outputs.message() - """ + >>> result_message = op.outputs.message() + """ # noqa: E501 return self._message - diff --git a/ansys/dpf/core/operators/logic/identical_meshes.py b/ansys/dpf/core/operators/logic/identical_meshes.py index 6c45f214927..54faa9f5c6c 100644 --- a/ansys/dpf/core/operators/logic/identical_meshes.py +++ b/ansys/dpf/core/operators/logic/identical_meshes.py @@ -1,78 +1,139 @@ """ identical_meshes -================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "logic" category -""" class identical_meshes(Operator): """Take two meshes and compare them. 
- available inputs: - - meshA (MeshedRegion) - - meshB (MeshedRegion) - - small_value (float) - - tolerance (float) - - available outputs: - - are_identical (bool) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.logic.identical_meshes() - - >>> # Make input connections - >>> my_meshA = dpf.MeshedRegion() - >>> op.inputs.meshA.connect(my_meshA) - >>> my_meshB = dpf.MeshedRegion() - >>> op.inputs.meshB.connect(my_meshB) - >>> my_small_value = float() - >>> op.inputs.small_value.connect(my_small_value) - >>> my_tolerance = float() - >>> op.inputs.tolerance.connect(my_tolerance) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.logic.identical_meshes(meshA=my_meshA,meshB=my_meshB,small_value=my_small_value,tolerance=my_tolerance) - - >>> # Get output data - >>> result_are_identical = op.outputs.are_identical()""" - def __init__(self, meshA=None, meshB=None, small_value=None, tolerance=None, config=None, server=None): - super().__init__(name="compare::mesh", config = config, server = server) + Parameters + ---------- + meshA : MeshedRegion + meshB : MeshedRegion + small_value : float + Define what is a small value for numeric + comparison. + tolerance : float + Define the relative tolerance ceil for + numeric comparison. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.logic.identical_meshes() + + >>> # Make input connections + >>> my_meshA = dpf.MeshedRegion() + >>> op.inputs.meshA.connect(my_meshA) + >>> my_meshB = dpf.MeshedRegion() + >>> op.inputs.meshB.connect(my_meshB) + >>> my_small_value = float() + >>> op.inputs.small_value.connect(my_small_value) + >>> my_tolerance = float() + >>> op.inputs.tolerance.connect(my_tolerance) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.logic.identical_meshes( + ... meshA=my_meshA, + ... meshB=my_meshB, + ... 
small_value=my_small_value, + ... tolerance=my_tolerance, + ... ) + + >>> # Get output data + >>> result_are_identical = op.outputs.are_identical() + """ + + def __init__( + self, + meshA=None, + meshB=None, + small_value=None, + tolerance=None, + config=None, + server=None, + ): + super().__init__(name="compare::mesh", config=config, server=server) self._inputs = InputsIdenticalMeshes(self) self._outputs = OutputsIdenticalMeshes(self) - if meshA !=None: + if meshA is not None: self.inputs.meshA.connect(meshA) - if meshB !=None: + if meshB is not None: self.inputs.meshB.connect(meshB) - if small_value !=None: + if small_value is not None: self.inputs.small_value.connect(small_value) - if tolerance !=None: + if tolerance is not None: self.inputs.tolerance.connect(tolerance) @staticmethod def _spec(): - spec = Specification(description="""Take two meshes and compare them.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "meshA", type_names=["abstract_meshed_region"], optional=False, document=""""""), - 1 : PinSpecification(name = "meshB", type_names=["abstract_meshed_region"], optional=False, document=""""""), - 2 : PinSpecification(name = "small_value", type_names=["double"], optional=False, document="""define what is a small value for numeric comparison."""), - 3 : PinSpecification(name = "tolerance", type_names=["double"], optional=False, document="""define the relative tolerance ceil for numeric comparison.""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "are_identical", type_names=["bool"], optional=False, document="""""")}) + description = """Take two meshes and compare them.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="meshA", + type_names=["abstract_meshed_region"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="meshB", + type_names=["abstract_meshed_region"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="small_value", + 
type_names=["double"], + optional=False, + document="""Define what is a small value for numeric + comparison.""", + ), + 3: PinSpecification( + name="tolerance", + type_names=["double"], + optional=False, + document="""Define the relative tolerance ceil for + numeric comparison.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="are_identical", + type_names=["bool"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "compare::mesh") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="compare::mesh", server=server) @property def inputs(self): @@ -80,167 +141,159 @@ def inputs(self): Returns -------- - inputs : InputsIdenticalMeshes + inputs : InputsIdenticalMeshes """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsIdenticalMeshes + outputs : OutputsIdenticalMeshes """ return super().outputs -#internal name: compare::mesh -#scripting name: identical_meshes class InputsIdenticalMeshes(_Inputs): - """Intermediate class used to connect user inputs to identical_meshes operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.identical_meshes() - >>> my_meshA = dpf.MeshedRegion() - >>> op.inputs.meshA.connect(my_meshA) - >>> my_meshB = dpf.MeshedRegion() - >>> op.inputs.meshB.connect(my_meshB) - >>> my_small_value = float() - >>> op.inputs.small_value.connect(my_small_value) - >>> my_tolerance = float() - >>> op.inputs.tolerance.connect(my_tolerance) + """Intermediate class used to connect user inputs to + identical_meshes operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.logic.identical_meshes() + >>> my_meshA = dpf.MeshedRegion() + >>> op.inputs.meshA.connect(my_meshA) + >>> my_meshB = dpf.MeshedRegion() + >>> op.inputs.meshB.connect(my_meshB) + >>> my_small_value = float() + >>> op.inputs.small_value.connect(my_small_value) + >>> my_tolerance = float() + >>> op.inputs.tolerance.connect(my_tolerance) """ + def __init__(self, op: Operator): super().__init__(identical_meshes._spec().inputs, op) - self._meshA = Input(identical_meshes._spec().input_pin(0), 0, op, -1) + self._meshA = Input(identical_meshes._spec().input_pin(0), 0, op, -1) self._inputs.append(self._meshA) - self._meshB = Input(identical_meshes._spec().input_pin(1), 1, op, -1) + self._meshB = Input(identical_meshes._spec().input_pin(1), 1, op, -1) self._inputs.append(self._meshB) - self._small_value = Input(identical_meshes._spec().input_pin(2), 2, op, -1) + self._small_value = Input(identical_meshes._spec().input_pin(2), 2, op, -1) self._inputs.append(self._small_value) - self._tolerance = Input(identical_meshes._spec().input_pin(3), 3, op, -1) + self._tolerance = Input(identical_meshes._spec().input_pin(3), 3, op, -1) self._inputs.append(self._tolerance) @property def meshA(self): - """Allows to connect meshA input to the operator + """Allows to connect meshA input to the operator. Parameters ---------- - my_meshA : MeshedRegion, + my_meshA : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.identical_meshes() >>> op.inputs.meshA.connect(my_meshA) - >>> #or + >>> # or >>> op.inputs.meshA(my_meshA) - """ return self._meshA @property def meshB(self): - """Allows to connect meshB input to the operator + """Allows to connect meshB input to the operator. 
Parameters ---------- - my_meshB : MeshedRegion, + my_meshB : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.identical_meshes() >>> op.inputs.meshB.connect(my_meshB) - >>> #or + >>> # or >>> op.inputs.meshB(my_meshB) - """ return self._meshB @property def small_value(self): - """Allows to connect small_value input to the operator + """Allows to connect small_value input to the operator. - - pindoc: define what is a small value for numeric comparison. + Define what is a small value for numeric + comparison. Parameters ---------- - my_small_value : float, + my_small_value : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.identical_meshes() >>> op.inputs.small_value.connect(my_small_value) - >>> #or + >>> # or >>> op.inputs.small_value(my_small_value) - """ return self._small_value @property def tolerance(self): - """Allows to connect tolerance input to the operator + """Allows to connect tolerance input to the operator. - - pindoc: define the relative tolerance ceil for numeric comparison. + Define the relative tolerance ceil for + numeric comparison. Parameters ---------- - my_tolerance : float, + my_tolerance : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.identical_meshes() >>> op.inputs.tolerance.connect(my_tolerance) - >>> #or + >>> # or >>> op.inputs.tolerance(my_tolerance) - """ return self._tolerance + class OutputsIdenticalMeshes(_Outputs): - """Intermediate class used to get outputs from identical_meshes operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.identical_meshes() - >>> # Connect inputs : op.inputs. ... - >>> result_are_identical = op.outputs.are_identical() + """Intermediate class used to get outputs from + identical_meshes operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.logic.identical_meshes() + >>> # Connect inputs : op.inputs. ... + >>> result_are_identical = op.outputs.are_identical() """ + def __init__(self, op: Operator): super().__init__(identical_meshes._spec().outputs, op) - self._are_identical = Output(identical_meshes._spec().output_pin(0), 0, op) + self._are_identical = Output(identical_meshes._spec().output_pin(0), 0, op) self._outputs.append(self._are_identical) @property def are_identical(self): """Allows to get are_identical output of the operator - Returns ---------- - my_are_identical : bool, + my_are_identical : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.identical_meshes() >>> # Connect inputs : op.inputs. ... - >>> result_are_identical = op.outputs.are_identical() - """ + >>> result_are_identical = op.outputs.are_identical() + """ # noqa: E501 return self._are_identical - diff --git a/ansys/dpf/core/operators/logic/identical_property_fields.py b/ansys/dpf/core/operators/logic/identical_property_fields.py index 700a5a98f61..5fdbf21ee60 100644 --- a/ansys/dpf/core/operators/logic/identical_property_fields.py +++ b/ansys/dpf/core/operators/logic/identical_property_fields.py @@ -1,69 +1,110 @@ """ identical_property_fields -========================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "logic" category -""" class identical_property_fields(Operator): """Take two property fields and compare them. 
- available inputs: - - property_fieldA (MeshedRegion) - - property_fieldB (MeshedRegion) + Parameters + ---------- + property_fieldA : MeshedRegion + property_fieldB : MeshedRegion + - available outputs: - - are_identical (bool) - - informations (str) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.logic.identical_property_fields() - >>> # Instantiate operator - >>> op = dpf.operators.logic.identical_property_fields() + >>> # Make input connections + >>> my_property_fieldA = dpf.MeshedRegion() + >>> op.inputs.property_fieldA.connect(my_property_fieldA) + >>> my_property_fieldB = dpf.MeshedRegion() + >>> op.inputs.property_fieldB.connect(my_property_fieldB) - >>> # Make input connections - >>> my_property_fieldA = dpf.MeshedRegion() - >>> op.inputs.property_fieldA.connect(my_property_fieldA) - >>> my_property_fieldB = dpf.MeshedRegion() - >>> op.inputs.property_fieldB.connect(my_property_fieldB) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.logic.identical_property_fields( + ... property_fieldA=my_property_fieldA, + ... property_fieldB=my_property_fieldB, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.logic.identical_property_fields(property_fieldA=my_property_fieldA,property_fieldB=my_property_fieldB) + >>> # Get output data + >>> result_are_identical = op.outputs.are_identical() + >>> result_information = op.outputs.information() + """ - >>> # Get output data - >>> result_are_identical = op.outputs.are_identical() - >>> result_informations = op.outputs.informations()""" - def __init__(self, property_fieldA=None, property_fieldB=None, config=None, server=None): - super().__init__(name="compare::property_field", config = config, server = server) + def __init__( + self, property_fieldA=None, property_fieldB=None, config=None, server=None + ): + super().__init__(name="compare::property_field", config=config, server=server) self._inputs = InputsIdenticalPropertyFields(self) self._outputs = OutputsIdenticalPropertyFields(self) - if property_fieldA !=None: + if property_fieldA is not None: self.inputs.property_fieldA.connect(property_fieldA) - if property_fieldB !=None: + if property_fieldB is not None: self.inputs.property_fieldB.connect(property_fieldB) @staticmethod def _spec(): - spec = Specification(description="""Take two property fields and compare them.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "property_fieldA", type_names=["abstract_meshed_region"], optional=False, document=""""""), - 1 : PinSpecification(name = "property_fieldB", type_names=["abstract_meshed_region"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "are_identical", type_names=["bool"], optional=False, document=""""""), - 1 : PinSpecification(name = "informations", type_names=["string"], optional=False, document="""""")}) + description = """Take two property fields and compare them.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="property_fieldA", + type_names=["abstract_meshed_region"], + 
optional=False, + document="""""", + ), + 1: PinSpecification( + name="property_fieldB", + type_names=["abstract_meshed_region"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="are_identical", + type_names=["bool"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="information", + type_names=["string"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "compare::property_field") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="compare::property_field", server=server) @property def inputs(self): @@ -71,137 +112,137 @@ def inputs(self): Returns -------- - inputs : InputsIdenticalPropertyFields + inputs : InputsIdenticalPropertyFields """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsIdenticalPropertyFields + outputs : OutputsIdenticalPropertyFields """ return super().outputs -#internal name: compare::property_field -#scripting name: identical_property_fields class InputsIdenticalPropertyFields(_Inputs): - """Intermediate class used to connect user inputs to identical_property_fields operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.identical_property_fields() - >>> my_property_fieldA = dpf.MeshedRegion() - >>> op.inputs.property_fieldA.connect(my_property_fieldA) - >>> my_property_fieldB = dpf.MeshedRegion() - >>> op.inputs.property_fieldB.connect(my_property_fieldB) + """Intermediate class used to connect user inputs to + identical_property_fields operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.logic.identical_property_fields() + >>> my_property_fieldA = dpf.MeshedRegion() + >>> op.inputs.property_fieldA.connect(my_property_fieldA) + >>> my_property_fieldB = dpf.MeshedRegion() + >>> op.inputs.property_fieldB.connect(my_property_fieldB) """ + def __init__(self, op: Operator): super().__init__(identical_property_fields._spec().inputs, op) - self._property_fieldA = Input(identical_property_fields._spec().input_pin(0), 0, op, -1) + self._property_fieldA = Input( + identical_property_fields._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._property_fieldA) - self._property_fieldB = Input(identical_property_fields._spec().input_pin(1), 1, op, -1) + self._property_fieldB = Input( + identical_property_fields._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._property_fieldB) @property def property_fieldA(self): - """Allows to connect property_fieldA input to the operator + """Allows to connect property_fieldA input to the operator. Parameters ---------- - my_property_fieldA : MeshedRegion, + my_property_fieldA : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.identical_property_fields() >>> op.inputs.property_fieldA.connect(my_property_fieldA) - >>> #or + >>> # or >>> op.inputs.property_fieldA(my_property_fieldA) - """ return self._property_fieldA @property def property_fieldB(self): - """Allows to connect property_fieldB input to the operator + """Allows to connect property_fieldB input to the operator. 
Parameters ---------- - my_property_fieldB : MeshedRegion, + my_property_fieldB : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.identical_property_fields() >>> op.inputs.property_fieldB.connect(my_property_fieldB) - >>> #or + >>> # or >>> op.inputs.property_fieldB(my_property_fieldB) - """ return self._property_fieldB + class OutputsIdenticalPropertyFields(_Outputs): - """Intermediate class used to get outputs from identical_property_fields operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.identical_property_fields() - >>> # Connect inputs : op.inputs. ... - >>> result_are_identical = op.outputs.are_identical() - >>> result_informations = op.outputs.informations() + """Intermediate class used to get outputs from + identical_property_fields operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.logic.identical_property_fields() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_are_identical = op.outputs.are_identical() + >>> result_information = op.outputs.information() """ + def __init__(self, op: Operator): super().__init__(identical_property_fields._spec().outputs, op) - self._are_identical = Output(identical_property_fields._spec().output_pin(0), 0, op) + self._are_identical = Output( + identical_property_fields._spec().output_pin(0), 0, op + ) self._outputs.append(self._are_identical) - self._informations = Output(identical_property_fields._spec().output_pin(1), 1, op) - self._outputs.append(self._informations) + self._information = Output( + identical_property_fields._spec().output_pin(1), 1, op + ) + self._outputs.append(self._information) @property def are_identical(self): """Allows to get are_identical output of the operator - Returns ---------- - my_are_identical : bool, + my_are_identical : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.identical_property_fields() >>> # Connect inputs : op.inputs. ... - >>> result_are_identical = op.outputs.are_identical() - """ + >>> result_are_identical = op.outputs.are_identical() + """ # noqa: E501 return self._are_identical @property - def informations(self): - """Allows to get informations output of the operator - + def information(self): + """Allows to get information output of the operator Returns ---------- - my_informations : str, + my_information : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.identical_property_fields() >>> # Connect inputs : op.inputs. ... 
- >>> result_informations = op.outputs.informations() - """ - return self._informations - + >>> result_information = op.outputs.information() + """ # noqa: E501 + return self._information diff --git a/ansys/dpf/core/operators/logic/included_fields.py b/ansys/dpf/core/operators/logic/included_fields.py index 57012fecde8..f46f8e9ac7b 100644 --- a/ansys/dpf/core/operators/logic/included_fields.py +++ b/ansys/dpf/core/operators/logic/included_fields.py @@ -1,81 +1,160 @@ """ included_fields =============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "logic" category -""" class included_fields(Operator): """Check if one field belongs to another. 
- available inputs: - - fieldA (Field) - - fieldB (Field) - - double_value (float) - - double_tolerance (float) (optional) - - available outputs: - - included (bool) - - message (str) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.logic.included_fields() - - >>> # Make input connections - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) - >>> my_double_value = float() - >>> op.inputs.double_value.connect(my_double_value) - >>> my_double_tolerance = float() - >>> op.inputs.double_tolerance.connect(my_double_tolerance) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.logic.included_fields(fieldA=my_fieldA,fieldB=my_fieldB,double_value=my_double_value,double_tolerance=my_double_tolerance) - - >>> # Get output data - >>> result_included = op.outputs.included() - >>> result_message = op.outputs.message()""" - def __init__(self, fieldA=None, fieldB=None, double_value=None, double_tolerance=None, config=None, server=None): - super().__init__(name="Are_fields_included", config = config, server = server) + Parameters + ---------- + fieldA : Field + fieldB : Field + double_value : float + Double positive small value. smallest value + which will be considered during the + comparison step: all the abs(values) + in field less than this value is + considered as null, (default + value:1.0e-14). + double_tolerance : float, optional + Double relative tolerance. maximum tolerance + gap between to compared values: + values within relative tolerance are + considered identical (v1-v2)/v2 < + relativetol (default is 0.001). 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.logic.included_fields() + + >>> # Make input connections + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) + >>> my_double_value = float() + >>> op.inputs.double_value.connect(my_double_value) + >>> my_double_tolerance = float() + >>> op.inputs.double_tolerance.connect(my_double_tolerance) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.logic.included_fields( + ... fieldA=my_fieldA, + ... fieldB=my_fieldB, + ... double_value=my_double_value, + ... double_tolerance=my_double_tolerance, + ... ) + + >>> # Get output data + >>> result_included = op.outputs.included() + >>> result_message = op.outputs.message() + """ + + def __init__( + self, + fieldA=None, + fieldB=None, + double_value=None, + double_tolerance=None, + config=None, + server=None, + ): + super().__init__(name="Are_fields_included", config=config, server=server) self._inputs = InputsIncludedFields(self) self._outputs = OutputsIncludedFields(self) - if fieldA !=None: + if fieldA is not None: self.inputs.fieldA.connect(fieldA) - if fieldB !=None: + if fieldB is not None: self.inputs.fieldB.connect(fieldB) - if double_value !=None: + if double_value is not None: self.inputs.double_value.connect(double_value) - if double_tolerance !=None: + if double_tolerance is not None: self.inputs.double_tolerance.connect(double_tolerance) @staticmethod def _spec(): - spec = Specification(description="""Check if one field belongs to another.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fieldA", type_names=["field"], optional=False, document=""""""), - 1 : PinSpecification(name = "fieldB", type_names=["field"], optional=False, document=""""""), - 2 : PinSpecification(name = "double_value", type_names=["double"], optional=False, document="""Double positive 
small value. Smallest value which will be considered during the comparison step: all the abs(values) in field less than this value is considered as null, (default value:1.0e-14)."""), - 3 : PinSpecification(name = "double_tolerance", type_names=["double"], optional=True, document="""Double relative tolerance. Maximum tolerance gap between to compared values: values within relative tolerance are considered identical (v1-v2)/v2 < relativeTol (default is 0.001).""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "included", type_names=["bool"], optional=False, document="""bool (true if belongs...)"""), - 1 : PinSpecification(name = "message", type_names=["string"], optional=False, document="""""")}) + description = """Check if one field belongs to another.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fieldA", + type_names=["field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="fieldB", + type_names=["field"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="double_value", + type_names=["double"], + optional=False, + document="""Double positive small value. smallest value + which will be considered during the + comparison step: all the abs(values) + in field less than this value is + considered as null, (default + value:1.0e-14).""", + ), + 3: PinSpecification( + name="double_tolerance", + type_names=["double"], + optional=True, + document="""Double relative tolerance. 
maximum tolerance + gap between to compared values: + values within relative tolerance are + considered identical (v1-v2)/v2 < + relativetol (default is 0.001).""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="included", + type_names=["bool"], + optional=False, + document="""Bool (true if belongs...)""", + ), + 1: PinSpecification( + name="message", + type_names=["string"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "Are_fields_included") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="Are_fields_included", server=server) @property def inputs(self): @@ -83,191 +162,186 @@ def inputs(self): Returns -------- - inputs : InputsIncludedFields + inputs : InputsIncludedFields """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsIncludedFields + outputs : OutputsIncludedFields """ return super().outputs -#internal name: Are_fields_included -#scripting name: included_fields class InputsIncludedFields(_Inputs): - """Intermediate class used to connect user inputs to included_fields operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.included_fields() - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) - >>> my_double_value = float() - >>> op.inputs.double_value.connect(my_double_value) - >>> my_double_tolerance = float() - >>> op.inputs.double_tolerance.connect(my_double_tolerance) + """Intermediate class used to connect user inputs to + included_fields operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.logic.included_fields() + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) + >>> my_double_value = float() + >>> op.inputs.double_value.connect(my_double_value) + >>> my_double_tolerance = float() + >>> op.inputs.double_tolerance.connect(my_double_tolerance) """ + def __init__(self, op: Operator): super().__init__(included_fields._spec().inputs, op) - self._fieldA = Input(included_fields._spec().input_pin(0), 0, op, -1) + self._fieldA = Input(included_fields._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fieldA) - self._fieldB = Input(included_fields._spec().input_pin(1), 1, op, -1) + self._fieldB = Input(included_fields._spec().input_pin(1), 1, op, -1) self._inputs.append(self._fieldB) - self._double_value = Input(included_fields._spec().input_pin(2), 2, op, -1) + self._double_value = Input(included_fields._spec().input_pin(2), 2, op, -1) self._inputs.append(self._double_value) - self._double_tolerance = Input(included_fields._spec().input_pin(3), 3, op, -1) + self._double_tolerance = Input(included_fields._spec().input_pin(3), 3, op, -1) self._inputs.append(self._double_tolerance) @property def fieldA(self): - """Allows to connect fieldA input to the operator + """Allows to connect fieldA input to the operator. Parameters ---------- - my_fieldA : Field, + my_fieldA : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.included_fields() >>> op.inputs.fieldA.connect(my_fieldA) - >>> #or + >>> # or >>> op.inputs.fieldA(my_fieldA) - """ return self._fieldA @property def fieldB(self): - """Allows to connect fieldB input to the operator + """Allows to connect fieldB input to the operator. 
Parameters ---------- - my_fieldB : Field, + my_fieldB : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.included_fields() >>> op.inputs.fieldB.connect(my_fieldB) - >>> #or + >>> # or >>> op.inputs.fieldB(my_fieldB) - """ return self._fieldB @property def double_value(self): - """Allows to connect double_value input to the operator + """Allows to connect double_value input to the operator. - - pindoc: Double positive small value. Smallest value which will be considered during the comparison step: all the abs(values) in field less than this value is considered as null, (default value:1.0e-14). + Double positive small value. smallest value + which will be considered during the + comparison step: all the abs(values) + in field less than this value is + considered as null, (default + value:1.0e-14). Parameters ---------- - my_double_value : float, + my_double_value : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.included_fields() >>> op.inputs.double_value.connect(my_double_value) - >>> #or + >>> # or >>> op.inputs.double_value(my_double_value) - """ return self._double_value @property def double_tolerance(self): - """Allows to connect double_tolerance input to the operator + """Allows to connect double_tolerance input to the operator. - - pindoc: Double relative tolerance. Maximum tolerance gap between to compared values: values within relative tolerance are considered identical (v1-v2)/v2 < relativeTol (default is 0.001). + Double relative tolerance. maximum tolerance + gap between to compared values: + values within relative tolerance are + considered identical (v1-v2)/v2 < + relativetol (default is 0.001). 
Parameters ---------- - my_double_tolerance : float, + my_double_tolerance : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.included_fields() >>> op.inputs.double_tolerance.connect(my_double_tolerance) - >>> #or + >>> # or >>> op.inputs.double_tolerance(my_double_tolerance) - """ return self._double_tolerance + class OutputsIncludedFields(_Outputs): - """Intermediate class used to get outputs from included_fields operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.included_fields() - >>> # Connect inputs : op.inputs. ... - >>> result_included = op.outputs.included() - >>> result_message = op.outputs.message() + """Intermediate class used to get outputs from + included_fields operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.logic.included_fields() + >>> # Connect inputs : op.inputs. ... + >>> result_included = op.outputs.included() + >>> result_message = op.outputs.message() """ + def __init__(self, op: Operator): super().__init__(included_fields._spec().outputs, op) - self._included = Output(included_fields._spec().output_pin(0), 0, op) + self._included = Output(included_fields._spec().output_pin(0), 0, op) self._outputs.append(self._included) - self._message = Output(included_fields._spec().output_pin(1), 1, op) + self._message = Output(included_fields._spec().output_pin(1), 1, op) self._outputs.append(self._message) @property def included(self): """Allows to get included output of the operator - - - pindoc: bool (true if belongs...) - Returns ---------- - my_included : bool, + my_included : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.included_fields() >>> # Connect inputs : op.inputs. ... 
- >>> result_included = op.outputs.included() - """ + >>> result_included = op.outputs.included() + """ # noqa: E501 return self._included @property def message(self): """Allows to get message output of the operator - Returns ---------- - my_message : str, + my_message : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.included_fields() >>> # Connect inputs : op.inputs. ... - >>> result_message = op.outputs.message() - """ + >>> result_message = op.outputs.message() + """ # noqa: E501 return self._message - diff --git a/ansys/dpf/core/operators/logic/merge_fields_by_label.py b/ansys/dpf/core/operators/logic/merge_fields_by_label.py deleted file mode 100644 index b11237f1e83..00000000000 --- a/ansys/dpf/core/operators/logic/merge_fields_by_label.py +++ /dev/null @@ -1,273 +0,0 @@ -""" -merge_fields_by_label -===================== -""" -from ansys.dpf.core.dpf_operator import Operator -from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type -from ansys.dpf.core.operators.specification import PinSpecification, Specification - -"""Operators from Ans.Dpf.Native plugin, from "logic" category -""" - -class merge_fields_by_label(Operator): - """Take a fields container and merge its fields that share the same label value. 
- - available inputs: - - fields_container (FieldsContainer) - - label (str) - - merged_field_support (AbstractFieldSupport) (optional) - - sumMerge (bool) (optional) - - available outputs: - - fields_container (FieldsContainer) - - merged_field_support (AbstractFieldSupport) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.logic.merge_fields_by_label() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_label = str() - >>> op.inputs.label.connect(my_label) - >>> my_merged_field_support = dpf.AbstractFieldSupport() - >>> op.inputs.merged_field_support.connect(my_merged_field_support) - >>> my_sumMerge = bool() - >>> op.inputs.sumMerge.connect(my_sumMerge) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.logic.merge_fields_by_label(fields_container=my_fields_container,label=my_label,merged_field_support=my_merged_field_support,sumMerge=my_sumMerge) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container() - >>> result_merged_field_support = op.outputs.merged_field_support()""" - def __init__(self, fields_container=None, label=None, merged_field_support=None, sumMerge=None, config=None, server=None): - super().__init__(name="merge::fields_container_label", config = config, server = server) - self._inputs = InputsMergeFieldsByLabel(self) - self._outputs = OutputsMergeFieldsByLabel(self) - if fields_container !=None: - self.inputs.fields_container.connect(fields_container) - if label !=None: - self.inputs.label.connect(label) - if merged_field_support !=None: - self.inputs.merged_field_support.connect(merged_field_support) - if sumMerge !=None: - self.inputs.sumMerge.connect(sumMerge) - - @staticmethod - def _spec(): - spec = Specification(description="""Take a fields container and merge its fields that share the same label 
value.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "label", type_names=["string"], optional=False, document="""Label identifier that should be merged."""), - 2 : PinSpecification(name = "merged_field_support", type_names=["abstract_field_support"], optional=True, document="""The FieldsContainer's support that has already been merged."""), - 3 : PinSpecification(name = "sumMerge", type_names=["bool"], optional=True, document="""Default is false. If true redundant quantities are summed instead of being ignored.""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "merged_field_support", type_names=["abstract_field_support"], optional=False, document="""""")}) - return spec - - - @staticmethod - def default_config(): - return Operator.default_config(name = "merge::fields_container_label") - - @property - def inputs(self): - """Enables to connect inputs to the operator - - Returns - -------- - inputs : InputsMergeFieldsByLabel - """ - return super().inputs - - - @property - def outputs(self): - """Enables to get outputs of the operator by evaluationg it - - Returns - -------- - outputs : OutputsMergeFieldsByLabel - """ - return super().outputs - - -#internal name: merge::fields_container_label -#scripting name: merge_fields_by_label -class InputsMergeFieldsByLabel(_Inputs): - """Intermediate class used to connect user inputs to merge_fields_by_label operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.merge_fields_by_label() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_label = str() - >>> op.inputs.label.connect(my_label) - >>> my_merged_field_support = dpf.AbstractFieldSupport() - >>> 
op.inputs.merged_field_support.connect(my_merged_field_support) - >>> my_sumMerge = bool() - >>> op.inputs.sumMerge.connect(my_sumMerge) - """ - def __init__(self, op: Operator): - super().__init__(merge_fields_by_label._spec().inputs, op) - self._fields_container = Input(merge_fields_by_label._spec().input_pin(0), 0, op, -1) - self._inputs.append(self._fields_container) - self._label = Input(merge_fields_by_label._spec().input_pin(1), 1, op, -1) - self._inputs.append(self._label) - self._merged_field_support = Input(merge_fields_by_label._spec().input_pin(2), 2, op, -1) - self._inputs.append(self._merged_field_support) - self._sumMerge = Input(merge_fields_by_label._spec().input_pin(3), 3, op, -1) - self._inputs.append(self._sumMerge) - - @property - def fields_container(self): - """Allows to connect fields_container input to the operator - - Parameters - ---------- - my_fields_container : FieldsContainer, - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.merge_fields_by_label() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or - >>> op.inputs.fields_container(my_fields_container) - - """ - return self._fields_container - - @property - def label(self): - """Allows to connect label input to the operator - - - pindoc: Label identifier that should be merged. - - Parameters - ---------- - my_label : str, - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.merge_fields_by_label() - >>> op.inputs.label.connect(my_label) - >>> #or - >>> op.inputs.label(my_label) - - """ - return self._label - - @property - def merged_field_support(self): - """Allows to connect merged_field_support input to the operator - - - pindoc: The FieldsContainer's support that has already been merged. 
- - Parameters - ---------- - my_merged_field_support : AbstractFieldSupport, - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.merge_fields_by_label() - >>> op.inputs.merged_field_support.connect(my_merged_field_support) - >>> #or - >>> op.inputs.merged_field_support(my_merged_field_support) - - """ - return self._merged_field_support - - @property - def sumMerge(self): - """Allows to connect sumMerge input to the operator - - - pindoc: Default is false. If true redundant quantities are summed instead of being ignored. - - Parameters - ---------- - my_sumMerge : bool, - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.merge_fields_by_label() - >>> op.inputs.sumMerge.connect(my_sumMerge) - >>> #or - >>> op.inputs.sumMerge(my_sumMerge) - - """ - return self._sumMerge - -class OutputsMergeFieldsByLabel(_Outputs): - """Intermediate class used to get outputs from merge_fields_by_label operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.merge_fields_by_label() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - >>> result_merged_field_support = op.outputs.merged_field_support() - """ - def __init__(self, op: Operator): - super().__init__(merge_fields_by_label._spec().outputs, op) - self._fields_container = Output(merge_fields_by_label._spec().output_pin(0), 0, op) - self._outputs.append(self._fields_container) - self._merged_field_support = Output(merge_fields_by_label._spec().output_pin(1), 1, op) - self._outputs.append(self._merged_field_support) - - @property - def fields_container(self): - """Allows to get fields_container output of the operator - - - Returns - ---------- - my_fields_container : FieldsContainer, - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.merge_fields_by_label() - >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ - return self._fields_container - - @property - def merged_field_support(self): - """Allows to get merged_field_support output of the operator - - - Returns - ---------- - my_merged_field_support : AbstractFieldSupport, - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.merge_fields_by_label() - >>> # Connect inputs : op.inputs. ... - >>> result_merged_field_support = op.outputs.merged_field_support() - """ - return self._merged_field_support - diff --git a/ansys/dpf/core/operators/logic/solid_shell_fields.py b/ansys/dpf/core/operators/logic/solid_shell_fields.py index 49ed6ddbfb4..c67f8943f0b 100644 --- a/ansys/dpf/core/operators/logic/solid_shell_fields.py +++ b/ansys/dpf/core/operators/logic/solid_shell_fields.py @@ -1,60 +1,91 @@ """ solid_shell_fields -================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "logic" category -""" class solid_shell_fields(Operator): - """Makes a fields based on fields container containing shell and solid fields with respect to time steps/frequencies. + """Makes a fields based on fields container containing shell and solid + fields with respect to time steps/frequencies. 
+ + Parameters + ---------- + fields_container : FieldsContainer - available inputs: - - fields_container (FieldsContainer) - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.logic.solid_shell_fields() - >>> # Instantiate operator - >>> op = dpf.operators.logic.solid_shell_fields() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.logic.solid_shell_fields( + ... fields_container=my_fields_container, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.logic.solid_shell_fields(fields_container=my_fields_container) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="merge::solid_shell_fields", config = config, server = server) + super().__init__(name="merge::solid_shell_fields", config=config, server=server) self._inputs = InputsSolidShellFields(self) self._outputs = OutputsSolidShellFields(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Makes a fields based on fields container containing shell and solid fields with respect to time steps/frequencies.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], 
optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Makes a fields based on fields container containing shell and solid + fields with respect to time steps/frequencies.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "merge::solid_shell_fields") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="merge::solid_shell_fields", server=server) @property def inputs(self): @@ -62,91 +93,89 @@ def inputs(self): Returns -------- - inputs : InputsSolidShellFields + inputs : InputsSolidShellFields """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsSolidShellFields + outputs : OutputsSolidShellFields """ return super().outputs -#internal name: merge::solid_shell_fields -#scripting name: solid_shell_fields class InputsSolidShellFields(_Inputs): - """Intermediate class used to connect user inputs to solid_shell_fields operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.solid_shell_fields() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + solid_shell_fields operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.logic.solid_shell_fields() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(solid_shell_fields._spec().inputs, op) - self._fields_container = Input(solid_shell_fields._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + solid_shell_fields._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.solid_shell_fields() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsSolidShellFields(_Outputs): - """Intermediate class used to get outputs from solid_shell_fields operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.logic.solid_shell_fields() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + solid_shell_fields operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.logic.solid_shell_fields() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(solid_shell_fields._spec().outputs, op) - self._fields_container = Output(solid_shell_fields._spec().output_pin(0), 0, op) + self._fields_container = Output(solid_shell_fields._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.logic.solid_shell_fields() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/mapping/__init__.py b/ansys/dpf/core/operators/mapping/__init__.py index e0e3d932f36..54ff34c4251 100644 --- a/ansys/dpf/core/operators/mapping/__init__.py +++ b/ansys/dpf/core/operators/mapping/__init__.py @@ -1,3 +1,5 @@ -from .solid_to_skin import solid_to_skin +from .find_reduced_coordinates import find_reduced_coordinates +from .on_reduced_coordinates import on_reduced_coordinates from .on_coordinates import on_coordinates from .scoping_on_coordinates import scoping_on_coordinates +from .solid_to_skin import solid_to_skin diff --git a/ansys/dpf/core/operators/mapping/find_reduced_coordinates.py b/ansys/dpf/core/operators/mapping/find_reduced_coordinates.py new file mode 100644 index 00000000000..5f3b05d6bd9 --- /dev/null +++ b/ansys/dpf/core/operators/mapping/find_reduced_coordinates.py @@ -0,0 +1,328 @@ +""" +find_reduced_coordinates +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class find_reduced_coordinates(Operator): + """Find the elements corresponding to the given coordinates in input and + compute their reduced coordinates in those elements. 
+ + Parameters + ---------- + coordinates : Field or FieldsContainer or MeshedRegion or MeshesContainer + mesh : MeshedRegion or MeshesContainer, optional + If the first field in input has no mesh in + support, then the mesh in this pin is + expected (default is false), if a + meshes container with several meshes + is set, it should be on the same + label spaces as the coordinates + fields container + use_quadratic_elements : bool + If this pin is set to true reduced + coordinates are computed on the + quadratic element if the element is + quadratic (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.mapping.find_reduced_coordinates() + + >>> # Make input connections + >>> my_coordinates = dpf.Field() + >>> op.inputs.coordinates.connect(my_coordinates) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_use_quadratic_elements = bool() + >>> op.inputs.use_quadratic_elements.connect(my_use_quadratic_elements) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.mapping.find_reduced_coordinates( + ... coordinates=my_coordinates, + ... mesh=my_mesh, + ... use_quadratic_elements=my_use_quadratic_elements, + ... 
) + + >>> # Get output data + >>> result_reduced_coordinates = op.outputs.reduced_coordinates() + >>> result_element_ids = op.outputs.element_ids() + """ + + def __init__( + self, + coordinates=None, + mesh=None, + use_quadratic_elements=None, + config=None, + server=None, + ): + super().__init__(name="find_reduced_coordinates", config=config, server=server) + self._inputs = InputsFindReducedCoordinates(self) + self._outputs = OutputsFindReducedCoordinates(self) + if coordinates is not None: + self.inputs.coordinates.connect(coordinates) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if use_quadratic_elements is not None: + self.inputs.use_quadratic_elements.connect(use_quadratic_elements) + + @staticmethod + def _spec(): + description = """Find the elements corresponding to the given coordinates in input and + compute their reduced coordinates in those elements.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 1: PinSpecification( + name="coordinates", + type_names=[ + "field", + "fields_container", + "abstract_meshed_region", + "meshes_container", + ], + optional=False, + document="""""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""If the first field in input has no mesh in + support, then the mesh in this pin is + expected (default is false), if a + meshes container with several meshes + is set, it should be on the same + label spaces as the coordinates + fields container""", + ), + 200: PinSpecification( + name="use_quadratic_elements", + type_names=["bool"], + optional=False, + document="""If this pin is set to true reduced + coordinates are computed on the + quadratic element if the element is + quadratic (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="reduced_coordinates", + type_names=["fields_container"], + optional=False, + document="""Coordinates in the reference elements""", + ), + 1: 
PinSpecification( + name="element_ids", + type_names=["scopings_container"], + optional=False, + document="""Ids of the elements where each set of reduced + coordinates is found""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="find_reduced_coordinates", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsFindReducedCoordinates + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsFindReducedCoordinates + """ + return super().outputs + + +class InputsFindReducedCoordinates(_Inputs): + """Intermediate class used to connect user inputs to + find_reduced_coordinates operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mapping.find_reduced_coordinates() + >>> my_coordinates = dpf.Field() + >>> op.inputs.coordinates.connect(my_coordinates) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_use_quadratic_elements = bool() + >>> op.inputs.use_quadratic_elements.connect(my_use_quadratic_elements) + """ + + def __init__(self, op: Operator): + super().__init__(find_reduced_coordinates._spec().inputs, op) + self._coordinates = Input( + find_reduced_coordinates._spec().input_pin(1), 1, op, -1 + ) + self._inputs.append(self._coordinates) + self._mesh = Input(find_reduced_coordinates._spec().input_pin(7), 7, op, -1) + self._inputs.append(self._mesh) + self._use_quadratic_elements = Input( + find_reduced_coordinates._spec().input_pin(200), 200, op, -1 + ) + self._inputs.append(self._use_quadratic_elements) + + @property + def coordinates(self): + """Allows to connect coordinates input to the operator. + + Parameters + ---------- + my_coordinates : Field or FieldsContainer or MeshedRegion or MeshesContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mapping.find_reduced_coordinates() + >>> op.inputs.coordinates.connect(my_coordinates) + >>> # or + >>> op.inputs.coordinates(my_coordinates) + """ + return self._coordinates + + @property + def mesh(self): + """Allows to connect mesh input to the operator. 
+ + If the first field in input has no mesh in + support, then the mesh in this pin is + expected (default is false), if a + meshes container with several meshes + is set, it should be on the same + label spaces as the coordinates + fields container + + Parameters + ---------- + my_mesh : MeshedRegion or MeshesContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mapping.find_reduced_coordinates() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + @property + def use_quadratic_elements(self): + """Allows to connect use_quadratic_elements input to the operator. + + If this pin is set to true reduced + coordinates are computed on the + quadratic element if the element is + quadratic (default is false) + + Parameters + ---------- + my_use_quadratic_elements : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mapping.find_reduced_coordinates() + >>> op.inputs.use_quadratic_elements.connect(my_use_quadratic_elements) + >>> # or + >>> op.inputs.use_quadratic_elements(my_use_quadratic_elements) + """ + return self._use_quadratic_elements + + +class OutputsFindReducedCoordinates(_Outputs): + """Intermediate class used to get outputs from + find_reduced_coordinates operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mapping.find_reduced_coordinates() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_reduced_coordinates = op.outputs.reduced_coordinates() + >>> result_element_ids = op.outputs.element_ids() + """ + + def __init__(self, op: Operator): + super().__init__(find_reduced_coordinates._spec().outputs, op) + self._reduced_coordinates = Output( + find_reduced_coordinates._spec().output_pin(0), 0, op + ) + self._outputs.append(self._reduced_coordinates) + self._element_ids = Output( + find_reduced_coordinates._spec().output_pin(1), 1, op + ) + self._outputs.append(self._element_ids) + + @property + def reduced_coordinates(self): + """Allows to get reduced_coordinates output of the operator + + Returns + ---------- + my_reduced_coordinates : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mapping.find_reduced_coordinates() + >>> # Connect inputs : op.inputs. ... + >>> result_reduced_coordinates = op.outputs.reduced_coordinates() + """ # noqa: E501 + return self._reduced_coordinates + + @property + def element_ids(self): + """Allows to get element_ids output of the operator + + Returns + ---------- + my_element_ids : ScopingsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mapping.find_reduced_coordinates() + >>> # Connect inputs : op.inputs. ... + >>> result_element_ids = op.outputs.element_ids() + """ # noqa: E501 + return self._element_ids diff --git a/ansys/dpf/core/operators/mapping/on_coordinates.py b/ansys/dpf/core/operators/mapping/on_coordinates.py index 6ba15d17a7d..8ebfe185b3e 100644 --- a/ansys/dpf/core/operators/mapping/on_coordinates.py +++ b/ansys/dpf/core/operators/mapping/on_coordinates.py @@ -1,84 +1,178 @@ """ on_coordinates -============== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "mapping" category -""" class on_coordinates(Operator): - """Evaluates a result on specified coordinates (interpolates results inside elements with shape functions). - - available inputs: - - fields_container (FieldsContainer) - - coordinates (Field, FieldsContainer) - - create_support (bool) (optional) - - mapping_on_scoping (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.mapping.on_coordinates() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_coordinates = dpf.Field() - >>> op.inputs.coordinates.connect(my_coordinates) - >>> my_create_support = bool() - >>> op.inputs.create_support.connect(my_create_support) - >>> my_mapping_on_scoping = bool() - >>> op.inputs.mapping_on_scoping.connect(my_mapping_on_scoping) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.mapping.on_coordinates(fields_container=my_fields_container,coordinates=my_coordinates,create_support=my_create_support,mapping_on_scoping=my_mapping_on_scoping,mesh=my_mesh) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, coordinates=None, create_support=None, mapping_on_scoping=None, mesh=None, config=None, 
server=None): - super().__init__(name="mapping", config = config, server = server) + """Evaluates a result on specified coordinates (interpolates results + inside elements with shape functions). + + Parameters + ---------- + fields_container : FieldsContainer + coordinates : Field or FieldsContainer or MeshedRegion or MeshesContainer + create_support : bool, optional + If this pin is set to true, then, a support + associated to the fields consisting + of points is created + mapping_on_scoping : bool, optional + If this pin is set to true, then the mapping + between the coordinates and the + fields is created only on the first + field scoping + mesh : MeshedRegion or MeshesContainer, optional + If the first field in input has no mesh in + support, then the mesh in this pin is + expected (default is false), if a + meshes container with several meshes + is set, it should be on the same + label spaces as the coordinates + fields container + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.mapping.on_coordinates() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_coordinates = dpf.Field() + >>> op.inputs.coordinates.connect(my_coordinates) + >>> my_create_support = bool() + >>> op.inputs.create_support.connect(my_create_support) + >>> my_mapping_on_scoping = bool() + >>> op.inputs.mapping_on_scoping.connect(my_mapping_on_scoping) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.mapping.on_coordinates( + ... fields_container=my_fields_container, + ... coordinates=my_coordinates, + ... create_support=my_create_support, + ... mapping_on_scoping=my_mapping_on_scoping, + ... mesh=my_mesh, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + fields_container=None, + coordinates=None, + create_support=None, + mapping_on_scoping=None, + mesh=None, + config=None, + server=None, + ): + super().__init__(name="mapping", config=config, server=server) self._inputs = InputsOnCoordinates(self) self._outputs = OutputsOnCoordinates(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if coordinates !=None: + if coordinates is not None: self.inputs.coordinates.connect(coordinates) - if create_support !=None: + if create_support is not None: self.inputs.create_support.connect(create_support) - if mapping_on_scoping !=None: + if mapping_on_scoping is not None: self.inputs.mapping_on_scoping.connect(mapping_on_scoping) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Evaluates a result on specified coordinates (interpolates results inside elements with shape functions).""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "coordinates", type_names=["field","fields_container"], optional=False, document=""""""), - 2 : PinSpecification(name = "create_support", type_names=["bool"], optional=True, document="""if this pin is set to true, then, a support associated to the fields consisting of points is created"""), - 3 : PinSpecification(name = "mapping_on_scoping", type_names=["bool"], optional=True, document="""if this pin is set to true, then the mapping between the coordinates and the fields is created only on the first field scoping"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""if the first field in input has no mesh in support, then the 
mesh in this pin is expected (default is false), if a meshes container with several meshes is set, it should be on the same label spaces as the coordinates fields container""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Evaluates a result on specified coordinates (interpolates results + inside elements with shape functions).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="coordinates", + type_names=[ + "field", + "fields_container", + "abstract_meshed_region", + "meshes_container", + ], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="create_support", + type_names=["bool"], + optional=True, + document="""If this pin is set to true, then, a support + associated to the fields consisting + of points is created""", + ), + 3: PinSpecification( + name="mapping_on_scoping", + type_names=["bool"], + optional=True, + document="""If this pin is set to true, then the mapping + between the coordinates and the + fields is created only on the first + field scoping""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""If the first field in input has no mesh in + support, then the mesh in this pin is + expected (default is false), if a + meshes container with several meshes + is set, it should be on the same + label spaces as the coordinates + fields container""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mapping") + def default_config(server=None): + 
"""Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="mapping", server=server) @property def inputs(self): @@ -86,193 +180,192 @@ def inputs(self): Returns -------- - inputs : InputsOnCoordinates + inputs : InputsOnCoordinates """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsOnCoordinates + outputs : OutputsOnCoordinates """ return super().outputs -#internal name: mapping -#scripting name: on_coordinates class InputsOnCoordinates(_Inputs): - """Intermediate class used to connect user inputs to on_coordinates operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mapping.on_coordinates() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_coordinates = dpf.Field() - >>> op.inputs.coordinates.connect(my_coordinates) - >>> my_create_support = bool() - >>> op.inputs.create_support.connect(my_create_support) - >>> my_mapping_on_scoping = bool() - >>> op.inputs.mapping_on_scoping.connect(my_mapping_on_scoping) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + on_coordinates operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mapping.on_coordinates() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_coordinates = dpf.Field() + >>> op.inputs.coordinates.connect(my_coordinates) + >>> my_create_support = bool() + >>> op.inputs.create_support.connect(my_create_support) + >>> my_mapping_on_scoping = bool() + >>> op.inputs.mapping_on_scoping.connect(my_mapping_on_scoping) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(on_coordinates._spec().inputs, op) - self._fields_container = Input(on_coordinates._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(on_coordinates._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) - self._coordinates = Input(on_coordinates._spec().input_pin(1), 1, op, -1) + self._coordinates = Input(on_coordinates._spec().input_pin(1), 1, op, -1) self._inputs.append(self._coordinates) - self._create_support = Input(on_coordinates._spec().input_pin(2), 2, op, -1) + self._create_support = Input(on_coordinates._spec().input_pin(2), 2, op, -1) self._inputs.append(self._create_support) - self._mapping_on_scoping = Input(on_coordinates._spec().input_pin(3), 3, op, -1) + self._mapping_on_scoping = Input(on_coordinates._spec().input_pin(3), 3, op, -1) self._inputs.append(self._mapping_on_scoping) - self._mesh = Input(on_coordinates._spec().input_pin(7), 7, op, -1) + self._mesh = Input(on_coordinates._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mapping.on_coordinates() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def coordinates(self): - """Allows to connect coordinates input to the operator + """Allows to connect coordinates input to the operator. Parameters ---------- - my_coordinates : Field, FieldsContainer, + my_coordinates : Field or FieldsContainer or MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mapping.on_coordinates() >>> op.inputs.coordinates.connect(my_coordinates) - >>> #or + >>> # or >>> op.inputs.coordinates(my_coordinates) - """ return self._coordinates @property def create_support(self): - """Allows to connect create_support input to the operator + """Allows to connect create_support input to the operator. - - pindoc: if this pin is set to true, then, a support associated to the fields consisting of points is created + If this pin is set to true, then, a support + associated to the fields consisting + of points is created Parameters ---------- - my_create_support : bool, + my_create_support : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mapping.on_coordinates() >>> op.inputs.create_support.connect(my_create_support) - >>> #or + >>> # or >>> op.inputs.create_support(my_create_support) - """ return self._create_support @property def mapping_on_scoping(self): - """Allows to connect mapping_on_scoping input to the operator + """Allows to connect mapping_on_scoping input to the operator. 
- - pindoc: if this pin is set to true, then the mapping between the coordinates and the fields is created only on the first field scoping + If this pin is set to true, then the mapping + between the coordinates and the + fields is created only on the first + field scoping Parameters ---------- - my_mapping_on_scoping : bool, + my_mapping_on_scoping : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mapping.on_coordinates() >>> op.inputs.mapping_on_scoping.connect(my_mapping_on_scoping) - >>> #or + >>> # or >>> op.inputs.mapping_on_scoping(my_mapping_on_scoping) - """ return self._mapping_on_scoping @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: if the first field in input has no mesh in support, then the mesh in this pin is expected (default is false), if a meshes container with several meshes is set, it should be on the same label spaces as the coordinates fields container + If the first field in input has no mesh in + support, then the mesh in this pin is + expected (default is false), if a + meshes container with several meshes + is set, it should be on the same + label spaces as the coordinates + fields container Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mapping.on_coordinates() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsOnCoordinates(_Outputs): - """Intermediate class used to get outputs from on_coordinates operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mapping.on_coordinates() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + on_coordinates operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mapping.on_coordinates() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(on_coordinates._spec().outputs, op) - self._fields_container = Output(on_coordinates._spec().output_pin(0), 0, op) + self._fields_container = Output(on_coordinates._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mapping.on_coordinates() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/mapping/on_reduced_coordinates.py b/ansys/dpf/core/operators/mapping/on_reduced_coordinates.py new file mode 100644 index 00000000000..acbdf116492 --- /dev/null +++ b/ansys/dpf/core/operators/mapping/on_reduced_coordinates.py @@ -0,0 +1,389 @@ +""" +on_reduced_coordinates +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class on_reduced_coordinates(Operator): + """Evaluates a result on specified reduced coordinates of given elements + (interpolates results inside elements with shape functions). 
+ + Parameters + ---------- + fields_container : FieldsContainer + reduced_coordinates : Field or FieldsContainer + Coordinates in the reference elements to find + (found with the operator + "find_reduced_coordinates") + element_ids : ScopingsContainer + Ids of the elements where each set of reduced + coordinates is found (found with the + operator "find_reduced_coordinates") + mesh : MeshedRegion or MeshesContainer, optional + If the first field in input has no mesh in + support, then the mesh in this pin is + expected (default is false), if a + meshes container with several meshes + is set, it should be on the same + label spaces as the coordinates + fields container + use_quadratic_elements : bool + If this pin is set to true interpolation is + computed on the quadratic element if + the element is quadratic (default is + false). to use only when results have + mid side nodes values. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.mapping.on_reduced_coordinates() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_reduced_coordinates = dpf.Field() + >>> op.inputs.reduced_coordinates.connect(my_reduced_coordinates) + >>> my_element_ids = dpf.ScopingsContainer() + >>> op.inputs.element_ids.connect(my_element_ids) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_use_quadratic_elements = bool() + >>> op.inputs.use_quadratic_elements.connect(my_use_quadratic_elements) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.mapping.on_reduced_coordinates( + ... fields_container=my_fields_container, + ... reduced_coordinates=my_reduced_coordinates, + ... element_ids=my_element_ids, + ... mesh=my_mesh, + ... use_quadratic_elements=my_use_quadratic_elements, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + fields_container=None, + reduced_coordinates=None, + element_ids=None, + mesh=None, + use_quadratic_elements=None, + config=None, + server=None, + ): + super().__init__(name="interpolation_operator", config=config, server=server) + self._inputs = InputsOnReducedCoordinates(self) + self._outputs = OutputsOnReducedCoordinates(self) + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if reduced_coordinates is not None: + self.inputs.reduced_coordinates.connect(reduced_coordinates) + if element_ids is not None: + self.inputs.element_ids.connect(element_ids) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if use_quadratic_elements is not None: + self.inputs.use_quadratic_elements.connect(use_quadratic_elements) + + @staticmethod + def _spec(): + description = """Evaluates a result on specified reduced coordinates of given elements + (interpolates results inside elements with shape + functions).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="reduced_coordinates", + type_names=["field", "fields_container"], + optional=False, + document="""Coordinates in the reference elements to find + (found with the operator + "find_reduced_coordinates")""", + ), + 2: PinSpecification( + name="element_ids", + type_names=["scopings_container"], + optional=False, + document="""Ids of the elements where each set of reduced + coordinates is found (found with the + operator "find_reduced_coordinates")""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""If the first field in input has no mesh in + support, then the mesh in this pin is + expected 
(default is false), if a + meshes container with several meshes + is set, it should be on the same + label spaces as the coordinates + fields container""", + ), + 200: PinSpecification( + name="use_quadratic_elements", + type_names=["bool"], + optional=False, + document="""If this pin is set to true interpolation is + computed on the quadratic element if + the element is quadratic (default is + false). to use only when results have + mid side nodes values.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="interpolation_operator", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsOnReducedCoordinates + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsOnReducedCoordinates + """ + return super().outputs + + +class InputsOnReducedCoordinates(_Inputs): + """Intermediate class used to connect user inputs to + on_reduced_coordinates operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mapping.on_reduced_coordinates() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_reduced_coordinates = dpf.Field() + >>> op.inputs.reduced_coordinates.connect(my_reduced_coordinates) + >>> my_element_ids = dpf.ScopingsContainer() + >>> op.inputs.element_ids.connect(my_element_ids) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_use_quadratic_elements = bool() + >>> op.inputs.use_quadratic_elements.connect(my_use_quadratic_elements) + """ + + def __init__(self, op: Operator): + super().__init__(on_reduced_coordinates._spec().inputs, op) + self._fields_container = Input( + on_reduced_coordinates._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._fields_container) + self._reduced_coordinates = Input( + on_reduced_coordinates._spec().input_pin(1), 1, op, -1 + ) + self._inputs.append(self._reduced_coordinates) + self._element_ids = Input( + on_reduced_coordinates._spec().input_pin(2), 2, op, -1 + ) + self._inputs.append(self._element_ids) + self._mesh = Input(on_reduced_coordinates._spec().input_pin(7), 7, op, -1) + self._inputs.append(self._mesh) + self._use_quadratic_elements = Input( + on_reduced_coordinates._spec().input_pin(200), 200, op, -1 + ) + self._inputs.append(self._use_quadratic_elements) + + @property + def fields_container(self): + """Allows to connect fields_container input to the operator. + + Parameters + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mapping.on_reduced_coordinates() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> # or + >>> op.inputs.fields_container(my_fields_container) + """ + return self._fields_container + + @property + def reduced_coordinates(self): + """Allows to connect reduced_coordinates input to the operator. 
+ + Coordinates in the reference elements to find + (found with the operator + "find_reduced_coordinates") + + Parameters + ---------- + my_reduced_coordinates : Field or FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mapping.on_reduced_coordinates() + >>> op.inputs.reduced_coordinates.connect(my_reduced_coordinates) + >>> # or + >>> op.inputs.reduced_coordinates(my_reduced_coordinates) + """ + return self._reduced_coordinates + + @property + def element_ids(self): + """Allows to connect element_ids input to the operator. + + Ids of the elements where each set of reduced + coordinates is found (found with the + operator "find_reduced_coordinates") + + Parameters + ---------- + my_element_ids : ScopingsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mapping.on_reduced_coordinates() + >>> op.inputs.element_ids.connect(my_element_ids) + >>> # or + >>> op.inputs.element_ids(my_element_ids) + """ + return self._element_ids + + @property + def mesh(self): + """Allows to connect mesh input to the operator. + + If the first field in input has no mesh in + support, then the mesh in this pin is + expected (default is false), if a + meshes container with several meshes + is set, it should be on the same + label spaces as the coordinates + fields container + + Parameters + ---------- + my_mesh : MeshedRegion or MeshesContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mapping.on_reduced_coordinates() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + @property + def use_quadratic_elements(self): + """Allows to connect use_quadratic_elements input to the operator. + + If this pin is set to true interpolation is + computed on the quadratic element if + the element is quadratic (default is + false). to use only when results have + mid side nodes values. 
+ + Parameters + ---------- + my_use_quadratic_elements : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mapping.on_reduced_coordinates() + >>> op.inputs.use_quadratic_elements.connect(my_use_quadratic_elements) + >>> # or + >>> op.inputs.use_quadratic_elements(my_use_quadratic_elements) + """ + return self._use_quadratic_elements + + +class OutputsOnReducedCoordinates(_Outputs): + """Intermediate class used to get outputs from + on_reduced_coordinates operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mapping.on_reduced_coordinates() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(on_reduced_coordinates._spec().outputs, op) + self._fields_container = Output( + on_reduced_coordinates._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mapping.on_reduced_coordinates() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/mapping/scoping_on_coordinates.py b/ansys/dpf/core/operators/mapping/scoping_on_coordinates.py index 6532d72e100..ef24124f89b 100644 --- a/ansys/dpf/core/operators/mapping/scoping_on_coordinates.py +++ b/ansys/dpf/core/operators/mapping/scoping_on_coordinates.py @@ -1,66 +1,101 @@ """ scoping_on_coordinates -====================== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "mapping" category -""" class scoping_on_coordinates(Operator): """Finds the Elemental scoping of a set of coordinates. - available inputs: - - coordinates (Field) - - mesh (MeshedRegion) + Parameters + ---------- + coordinates : Field + mesh : MeshedRegion + - available outputs: - - scoping (Scoping) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.mapping.scoping_on_coordinates() - >>> # Instantiate operator - >>> op = dpf.operators.mapping.scoping_on_coordinates() + >>> # Make input connections + >>> my_coordinates = dpf.Field() + >>> op.inputs.coordinates.connect(my_coordinates) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) - >>> # Make input connections - >>> my_coordinates = dpf.Field() - >>> op.inputs.coordinates.connect(my_coordinates) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.mapping.scoping_on_coordinates( + ... coordinates=my_coordinates, + ... mesh=my_mesh, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.mapping.scoping_on_coordinates(coordinates=my_coordinates,mesh=my_mesh) + >>> # Get output data + >>> result_scoping = op.outputs.scoping() + """ - >>> # Get output data - >>> result_scoping = op.outputs.scoping()""" def __init__(self, coordinates=None, mesh=None, config=None, server=None): - super().__init__(name="scoping::on_coordinates", config = config, server = server) + super().__init__(name="scoping::on_coordinates", config=config, server=server) self._inputs = InputsScopingOnCoordinates(self) self._outputs = OutputsScopingOnCoordinates(self) - if coordinates !=None: + if coordinates is not None: self.inputs.coordinates.connect(coordinates) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Finds the Elemental scoping of a set of coordinates.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "coordinates", type_names=["field"], optional=False, document=""""""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "scoping", type_names=["scoping"], optional=False, document="""""")}) + description = """Finds the Elemental scoping of a set of coordinates.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="coordinates", + type_names=["field"], + optional=False, + document="""""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "scoping::on_coordinates") + def default_config(server=None): + """Returns the 
default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="scoping::on_coordinates", server=server) @property def inputs(self): @@ -68,115 +103,111 @@ def inputs(self): Returns -------- - inputs : InputsScopingOnCoordinates + inputs : InputsScopingOnCoordinates """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsScopingOnCoordinates + outputs : OutputsScopingOnCoordinates """ return super().outputs -#internal name: scoping::on_coordinates -#scripting name: scoping_on_coordinates class InputsScopingOnCoordinates(_Inputs): - """Intermediate class used to connect user inputs to scoping_on_coordinates operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mapping.scoping_on_coordinates() - >>> my_coordinates = dpf.Field() - >>> op.inputs.coordinates.connect(my_coordinates) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + scoping_on_coordinates operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mapping.scoping_on_coordinates() + >>> my_coordinates = dpf.Field() + >>> op.inputs.coordinates.connect(my_coordinates) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(scoping_on_coordinates._spec().inputs, op) - self._coordinates = Input(scoping_on_coordinates._spec().input_pin(0), 0, op, -1) + self._coordinates = Input( + scoping_on_coordinates._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._coordinates) - self._mesh = Input(scoping_on_coordinates._spec().input_pin(7), 7, op, -1) + self._mesh = Input(scoping_on_coordinates._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) @property def coordinates(self): - """Allows to connect coordinates input to the operator + """Allows to connect coordinates input to the operator. Parameters ---------- - my_coordinates : Field, + my_coordinates : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mapping.scoping_on_coordinates() >>> op.inputs.coordinates.connect(my_coordinates) - >>> #or + >>> # or >>> op.inputs.coordinates(my_coordinates) - """ return self._coordinates @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mapping.scoping_on_coordinates() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsScopingOnCoordinates(_Outputs): - """Intermediate class used to get outputs from scoping_on_coordinates operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mapping.scoping_on_coordinates() - >>> # Connect inputs : op.inputs. ... 
- >>> result_scoping = op.outputs.scoping() + """Intermediate class used to get outputs from + scoping_on_coordinates operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mapping.scoping_on_coordinates() + >>> # Connect inputs : op.inputs. ... + >>> result_scoping = op.outputs.scoping() """ + def __init__(self, op: Operator): super().__init__(scoping_on_coordinates._spec().outputs, op) - self._scoping = Output(scoping_on_coordinates._spec().output_pin(0), 0, op) + self._scoping = Output(scoping_on_coordinates._spec().output_pin(0), 0, op) self._outputs.append(self._scoping) @property def scoping(self): """Allows to get scoping output of the operator - Returns ---------- - my_scoping : Scoping, + my_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mapping.scoping_on_coordinates() >>> # Connect inputs : op.inputs. ... - >>> result_scoping = op.outputs.scoping() - """ + >>> result_scoping = op.outputs.scoping() + """ # noqa: E501 return self._scoping - diff --git a/ansys/dpf/core/operators/mapping/solid_to_skin.py b/ansys/dpf/core/operators/mapping/solid_to_skin.py index 85a288e852c..874e296e5b4 100644 --- a/ansys/dpf/core/operators/mapping/solid_to_skin.py +++ b/ansys/dpf/core/operators/mapping/solid_to_skin.py @@ -1,66 +1,107 @@ """ solid_to_skin -============= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "mapping" category -""" class solid_to_skin(Operator): - """Maps a field defined on solid elements to a field defined on skin elements. 
- - available inputs: - - field (Field, FieldsContainer) - - mesh_scoping (MeshedRegion) (optional) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.mapping.solid_to_skin() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_mesh_scoping = dpf.MeshedRegion() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.mapping.solid_to_skin(field=my_field,mesh_scoping=my_mesh_scoping) + """Maps a field defined on solid elements to a field defined on skin + elements. + + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + mesh_scoping : MeshedRegion, optional + Skin mesh region expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.mapping.solid_to_skin() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_mesh_scoping = dpf.MeshedRegion() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.mapping.solid_to_skin( + ... field=my_field, + ... mesh_scoping=my_mesh_scoping, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, field=None, mesh_scoping=None, config=None, server=None): - super().__init__(name="solid_to_skin", config = config, server = server) + super().__init__(name="solid_to_skin", config=config, server=server) self._inputs = InputsSolidToSkin(self) self._outputs = OutputsSolidToSkin(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) @staticmethod def _spec(): - spec = Specification(description="""Maps a field defined on solid elements to a field defined on skin elements.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["abstract_meshed_region"], optional=True, document="""skin mesh region expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Maps a field defined on solid elements to a field defined on skin + elements.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["abstract_meshed_region"], + optional=True, + document="""Skin mesh region expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "solid_to_skin") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="solid_to_skin", server=server) @property def inputs(self): @@ -68,119 +109,114 @@ def inputs(self): Returns -------- - inputs : InputsSolidToSkin + inputs : InputsSolidToSkin """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsSolidToSkin + outputs : OutputsSolidToSkin """ return super().outputs -#internal name: solid_to_skin -#scripting name: solid_to_skin class InputsSolidToSkin(_Inputs): - """Intermediate class used to connect user inputs to solid_to_skin operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mapping.solid_to_skin() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_mesh_scoping = dpf.MeshedRegion() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + """Intermediate class used to connect user inputs to + solid_to_skin operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mapping.solid_to_skin() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_mesh_scoping = dpf.MeshedRegion() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) """ + def __init__(self, op: Operator): super().__init__(solid_to_skin._spec().inputs, op) - self._field = Input(solid_to_skin._spec().input_pin(0), 0, op, -1) + self._field = Input(solid_to_skin._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._mesh_scoping = Input(solid_to_skin._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(solid_to_skin._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mapping.solid_to_skin() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. 
- - pindoc: skin mesh region expected + Skin mesh region expected Parameters ---------- - my_mesh_scoping : MeshedRegion, + my_mesh_scoping : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mapping.solid_to_skin() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping + class OutputsSolidToSkin(_Outputs): - """Intermediate class used to get outputs from solid_to_skin operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mapping.solid_to_skin() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + solid_to_skin operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mapping.solid_to_skin() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(solid_to_skin._spec().outputs, op) - self._field = Output(solid_to_skin._spec().output_pin(0), 0, op) + self._field = Output(solid_to_skin._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mapping.solid_to_skin() >>> # Connect inputs : op.inputs. ... 
- >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/__init__.py b/ansys/dpf/core/operators/math/__init__.py index e90b0e00336..04cbfeb69fb 100644 --- a/ansys/dpf/core/operators/math/__init__.py +++ b/ansys/dpf/core/operators/math/__init__.py @@ -1,15 +1,14 @@ -from .minus import minus +from .minus import minus from .cplx_multiply import cplx_multiply from .unit_convert import unit_convert -from .minus_fc import minus_fc +from .minus_fc import minus_fc from .accumulate import accumulate from .unit_convert_fc import unit_convert_fc from .accumulate_min_over_label_fc import accumulate_min_over_label_fc -from .add import add -from .add_fc import add_fc +from .add import add +from .add_fc import add_fc from .sin_fc import sin_fc from .add_constant import add_constant -from .invert_fc import invert_fc from .pow import pow from .add_constant_fc import add_constant_fc from .scale import scale @@ -19,7 +18,7 @@ from .sweeping_phase import sweeping_phase from .centroid_fc import centroid_fc from .sweeping_phase_fc import sweeping_phase_fc -from .sqr import sqr +from .sqr import sqr from .sin import sin from .cos import cos from .cos_fc import cos_fc @@ -29,6 +28,10 @@ from .norm import norm from .sqrt_fc import sqrt_fc from .norm_fc import norm_fc +from .exponential import exponential +from .exponential_fc import exponential_fc +from .ln import ln +from .ln_fc import ln_fc from .component_wise_divide import component_wise_divide from .component_wise_divide_fc import component_wise_divide_fc from .kronecker_prod import kronecker_prod @@ -36,27 +39,30 @@ from .conjugate import conjugate from .img_part import img_part from .amplitude import amplitude -from .cplx_add import cplx_add from .cplx_dot import cplx_dot from .cplx_divide import cplx_divide from .dot import dot from .cplx_derive import cplx_derive from .polar_to_cplx import polar_to_cplx from 
.amplitude_fc import amplitude_fc -from .scale_by_field import scale_by_field -from .generalized_inner_product_fc import generalized_inner_product_fc +from .generalized_inner_product_fc import generalized_inner_product_fc from .phase import phase -from .scale_by_field_fc import scale_by_field_fc from .phase_fc import phase_fc from .modulus import modulus from .accumulate_fc import accumulate_fc -from .generalized_inner_product import generalized_inner_product +from .generalized_inner_product import generalized_inner_product from .overall_dot import overall_dot -from .invert import invert from .dot_tensor import dot_tensor +from .scale_by_field import scale_by_field +from .scale_by_field_fc import scale_by_field_fc +from .invert import invert +from .invert_fc import invert_fc from .average_over_label_fc import average_over_label_fc from .accumulate_over_label_fc import accumulate_over_label_fc from .accumulate_level_over_label_fc import accumulate_level_over_label_fc +from .correlation import correlation +from .make_one_on_comp import make_one_on_comp +from .entity_extractor import entity_extractor from .modal_superposition import modal_superposition from .matrix_inverse import matrix_inverse from .qr_solve import qr_solve @@ -64,3 +70,39 @@ from .fft_gradient_eval import fft_gradient_eval from .fft_multi_harmonic_minmax import fft_multi_harmonic_minmax from .svd import svd +from .matrix_inverse import matrix_inverse +from .qr_solve import qr_solve +from .fft_eval import fft_eval +from .fft_gradient_eval import fft_gradient_eval +from .fft_multi_harmonic_minmax import fft_multi_harmonic_minmax +from .svd import svd +from .matrix_inverse import matrix_inverse +from .qr_solve import qr_solve +from .fft_eval import fft_eval +from .fft_gradient_eval import fft_gradient_eval +from .fft_multi_harmonic_minmax import fft_multi_harmonic_minmax +from .svd import svd +from .matrix_inverse import matrix_inverse +from .qr_solve import qr_solve +from .fft_eval import fft_eval 
+from .fft_gradient_eval import fft_gradient_eval +from .fft_multi_harmonic_minmax import fft_multi_harmonic_minmax +from .svd import svd +from .matrix_inverse import matrix_inverse +from .qr_solve import qr_solve +from .fft_eval import fft_eval +from .fft_gradient_eval import fft_gradient_eval +from .fft_multi_harmonic_minmax import fft_multi_harmonic_minmax +from .svd import svd +from .matrix_inverse import matrix_inverse +from .qr_solve import qr_solve +from .fft_eval import fft_eval +from .fft_gradient_eval import fft_gradient_eval +from .fft_multi_harmonic_minmax import fft_multi_harmonic_minmax +from .svd import svd +from .matrix_inverse import matrix_inverse +from .qr_solve import qr_solve +from .fft_eval import fft_eval +from .fft_gradient_eval import fft_gradient_eval +from .fft_multi_harmonic_minmax import fft_multi_harmonic_minmax +from .svd import svd diff --git a/ansys/dpf/core/operators/math/accumulate.py b/ansys/dpf/core/operators/math/accumulate.py index 9c5d23ef64d..fedef225c66 100644 --- a/ansys/dpf/core/operators/math/accumulate.py +++ b/ansys/dpf/core/operators/math/accumulate.py @@ -1,60 +1,107 @@ """ accumulate -========== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class accumulate(Operator): - """Sum all the elementary data of a field to get one elementary data at the end. 
- - available inputs: - - fieldA (Field, FieldsContainer) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.accumulate() - - >>> # Make input connections - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.accumulate(fieldA=my_fieldA) + """Sum all the elementary data of a field to get one elementary data at + the end. + + Parameters + ---------- + fieldA : Field or FieldsContainer + Field or fields container with only one field + is expected + ponderation : Field + Field + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.accumulate() + + >>> # Make input connections + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_ponderation = dpf.Field() + >>> op.inputs.ponderation.connect(my_ponderation) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.accumulate( + ... fieldA=my_fieldA, + ... ponderation=my_ponderation, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" - def __init__(self, fieldA=None, config=None, server=None): - super().__init__(name="accumulate", config = config, server = server) + def __init__(self, fieldA=None, ponderation=None, config=None, server=None): + super().__init__(name="accumulate", config=config, server=server) self._inputs = InputsAccumulate(self) self._outputs = OutputsAccumulate(self) - if fieldA !=None: + if fieldA is not None: self.inputs.fieldA.connect(fieldA) + if ponderation is not None: + self.inputs.ponderation.connect(ponderation) @staticmethod def _spec(): - spec = Specification(description="""Sum all the elementary data of a field to get one elementary data at the end.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fieldA", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Sum all the elementary data of a field to get one elementary data at + the end.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fieldA", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="ponderation", + type_names=["field"], + optional=False, + document="""Field""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "accumulate") + def default_config(server=None): + """Returns the default config of the operator. 
+ + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="accumulate", server=server) @property def inputs(self): @@ -62,93 +109,114 @@ def inputs(self): Returns -------- - inputs : InputsAccumulate + inputs : InputsAccumulate """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsAccumulate + outputs : OutputsAccumulate """ return super().outputs -#internal name: accumulate -#scripting name: accumulate class InputsAccumulate(_Inputs): - """Intermediate class used to connect user inputs to accumulate operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.accumulate() - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) + """Intermediate class used to connect user inputs to + accumulate operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.accumulate() + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_ponderation = dpf.Field() + >>> op.inputs.ponderation.connect(my_ponderation) """ + def __init__(self, op: Operator): super().__init__(accumulate._spec().inputs, op) - self._fieldA = Input(accumulate._spec().input_pin(0), 0, op, -1) + self._fieldA = Input(accumulate._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fieldA) + self._ponderation = Input(accumulate._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._ponderation) @property def fieldA(self): - """Allows to connect fieldA input to the operator + """Allows to connect fieldA input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldA : Field, FieldsContainer, + my_fieldA : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.accumulate() >>> op.inputs.fieldA.connect(my_fieldA) - >>> #or + >>> # or >>> op.inputs.fieldA(my_fieldA) - """ return self._fieldA + @property + def ponderation(self): + """Allows to connect ponderation input to the operator. + + Field + + Parameters + ---------- + my_ponderation : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.accumulate() + >>> op.inputs.ponderation.connect(my_ponderation) + >>> # or + >>> op.inputs.ponderation(my_ponderation) + """ + return self._ponderation + + class OutputsAccumulate(_Outputs): - """Intermediate class used to get outputs from accumulate operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.accumulate() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + accumulate operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.accumulate() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(accumulate._spec().outputs, op) - self._field = Output(accumulate._spec().output_pin(0), 0, op) + self._field = Output(accumulate._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.accumulate() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/accumulate_fc.py b/ansys/dpf/core/operators/math/accumulate_fc.py index 9032ab3a120..e13140f64ec 100644 --- a/ansys/dpf/core/operators/math/accumulate_fc.py +++ b/ansys/dpf/core/operators/math/accumulate_fc.py @@ -1,60 +1,109 @@ """ accumulate_fc -============= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class accumulate_fc(Operator): - """Sum all the elementary data of a field to get one elementary data at the end. 
- - available inputs: - - fields_container (FieldsContainer) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.accumulate_fc() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.accumulate_fc(fields_container=my_fields_container) + """Sum all the elementary data of a field to get one elementary data at + the end. + + Parameters + ---------- + fields_container : FieldsContainer + Field or fields container with only one field + is expected + ponderation : Field + Field + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.accumulate_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_ponderation = dpf.Field() + >>> op.inputs.ponderation.connect(my_ponderation) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.accumulate_fc( + ... fields_container=my_fields_container, + ... ponderation=my_ponderation, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="accumulate_fc", config = config, server = server) + def __init__( + self, fields_container=None, ponderation=None, config=None, server=None + ): + super().__init__(name="accumulate_fc", config=config, server=server) self._inputs = InputsAccumulateFc(self) self._outputs = OutputsAccumulateFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) + if ponderation is not None: + self.inputs.ponderation.connect(ponderation) @staticmethod def _spec(): - spec = Specification(description="""Sum all the elementary data of a field to get one elementary data at the end.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Sum all the elementary data of a field to get one elementary data at + the end.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="ponderation", + type_names=["field"], + optional=False, + document="""Field""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = 
"accumulate_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="accumulate_fc", server=server) @property def inputs(self): @@ -62,93 +111,114 @@ def inputs(self): Returns -------- - inputs : InputsAccumulateFc + inputs : InputsAccumulateFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsAccumulateFc + outputs : OutputsAccumulateFc """ return super().outputs -#internal name: accumulate_fc -#scripting name: accumulate_fc class InputsAccumulateFc(_Inputs): - """Intermediate class used to connect user inputs to accumulate_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.accumulate_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + accumulate_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.accumulate_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_ponderation = dpf.Field() + >>> op.inputs.ponderation.connect(my_ponderation) """ + def __init__(self, op: Operator): super().__init__(accumulate_fc._spec().inputs, op) - self._fields_container = Input(accumulate_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(accumulate_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) + self._ponderation = Input(accumulate_fc._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._ponderation) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.accumulate_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + @property + def ponderation(self): + """Allows to connect ponderation input to the operator. 
+ + Field + + Parameters + ---------- + my_ponderation : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.accumulate_fc() + >>> op.inputs.ponderation.connect(my_ponderation) + >>> # or + >>> op.inputs.ponderation(my_ponderation) + """ + return self._ponderation + + class OutputsAccumulateFc(_Outputs): - """Intermediate class used to get outputs from accumulate_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.accumulate_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + accumulate_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.accumulate_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(accumulate_fc._spec().outputs, op) - self._fields_container = Output(accumulate_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(accumulate_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.accumulate_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/accumulate_level_over_label_fc.py b/ansys/dpf/core/operators/math/accumulate_level_over_label_fc.py index a0f1d158cda..8517d34c698 100644 --- a/ansys/dpf/core/operators/math/accumulate_level_over_label_fc.py +++ b/ansys/dpf/core/operators/math/accumulate_level_over_label_fc.py @@ -1,60 +1,103 @@ """ accumulate_level_over_label_fc -============================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class accumulate_level_over_label_fc(Operator): - """Compute the component-wise sum over all the fields having the same id for the label set in input in the fields container and apply 10.0xlog10(data/10xx-12) on the result. This computation can be incremental, if the input fields container is connected and the operator is ran several time, the output field will be on all the inputs connected + """Compute the component-wise sum over all the fields having the same id + for the label set in input in the fields container and apply + 10.0xlog10(data/10xx-12) on the result. 
This computation can be + incremental, if the input fields container is connected and the + operator is ran several time, the output field will be on all the + inputs connected + + Parameters + ---------- + fields_container : FieldsContainer - available inputs: - - fields_container (FieldsContainer) - available outputs: - - field (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.accumulate_level_over_label_fc() - >>> # Instantiate operator - >>> op = dpf.operators.math.accumulate_level_over_label_fc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.accumulate_level_over_label_fc( + ... fields_container=my_fields_container, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.accumulate_level_over_label_fc(fields_container=my_fields_container) + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="accumulate_level_over_label_fc", config = config, server = server) + super().__init__( + name="accumulate_level_over_label_fc", config=config, server=server + ) self._inputs = InputsAccumulateLevelOverLabelFc(self) self._outputs = OutputsAccumulateLevelOverLabelFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Compute the component-wise sum over all the fields having the same id for the label set in input in the fields container and apply 10.0xlog10(data/10xx-12) on the result. This computation can be incremental, if the input fields container is connected and the operator is ran several time, the output field will be on all the inputs connected""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Compute the component-wise sum over all the fields having the same id + for the label set in input in the fields container and + apply 10.0xlog10(data/10xx-12) on the result. 
This + computation can be incremental, if the input fields + container is connected and the operator is ran several + time, the output field will be on all the inputs connected""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "accumulate_level_over_label_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config( + name="accumulate_level_over_label_fc", server=server + ) @property def inputs(self): @@ -62,91 +105,91 @@ def inputs(self): Returns -------- - inputs : InputsAccumulateLevelOverLabelFc + inputs : InputsAccumulateLevelOverLabelFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsAccumulateLevelOverLabelFc + outputs : OutputsAccumulateLevelOverLabelFc """ return super().outputs -#internal name: accumulate_level_over_label_fc -#scripting name: accumulate_level_over_label_fc class InputsAccumulateLevelOverLabelFc(_Inputs): - """Intermediate class used to connect user inputs to accumulate_level_over_label_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.accumulate_level_over_label_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + accumulate_level_over_label_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.accumulate_level_over_label_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(accumulate_level_over_label_fc._spec().inputs, op) - self._fields_container = Input(accumulate_level_over_label_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + accumulate_level_over_label_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.accumulate_level_over_label_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsAccumulateLevelOverLabelFc(_Outputs): - """Intermediate class used to get outputs from accumulate_level_over_label_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.accumulate_level_over_label_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + accumulate_level_over_label_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.accumulate_level_over_label_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(accumulate_level_over_label_fc._spec().outputs, op) - self._field = Output(accumulate_level_over_label_fc._spec().output_pin(0), 0, op) + self._field = Output( + accumulate_level_over_label_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.accumulate_level_over_label_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/accumulate_min_over_label_fc.py b/ansys/dpf/core/operators/math/accumulate_min_over_label_fc.py index 186c46c903f..e66cbadd288 100644 --- a/ansys/dpf/core/operators/math/accumulate_min_over_label_fc.py +++ b/ansys/dpf/core/operators/math/accumulate_min_over_label_fc.py @@ -1,60 +1,102 @@ """ accumulate_min_over_label_fc -============================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class accumulate_min_over_label_fc(Operator): - """Compute the component-wise sum over all the fields having the same id for the label set in input in the fields container and take its opposite. This computation can be incremental, if the input fields container is connected and the operator is ran several time, the output field will be on all the inputs connected + """Compute the component-wise sum over all the fields having the same id + for the label set in input in the fields container and take its + opposite. 
This computation can be incremental, if the input fields + container is connected and the operator is ran several time, the + output field will be on all the inputs connected + + Parameters + ---------- + fields_container : FieldsContainer - available inputs: - - fields_container (FieldsContainer) - available outputs: - - field (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.accumulate_min_over_label_fc() - >>> # Instantiate operator - >>> op = dpf.operators.math.accumulate_min_over_label_fc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.accumulate_min_over_label_fc( + ... fields_container=my_fields_container, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.accumulate_min_over_label_fc(fields_container=my_fields_container) + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="accumulate_min_over_label_fc", config = config, server = server) + super().__init__( + name="accumulate_min_over_label_fc", config=config, server=server + ) self._inputs = InputsAccumulateMinOverLabelFc(self) self._outputs = OutputsAccumulateMinOverLabelFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Compute the component-wise sum over all the fields having the same id for the label set in input in the fields container and take its opposite. This computation can be incremental, if the input fields container is connected and the operator is ran several time, the output field will be on all the inputs connected""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Compute the component-wise sum over all the fields having the same id + for the label set in input in the fields container and + take its opposite. 
This computation can be incremental, if + the input fields container is connected and the operator + is ran several time, the output field will be on all the + inputs connected""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "accumulate_min_over_label_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config( + name="accumulate_min_over_label_fc", server=server + ) @property def inputs(self): @@ -62,91 +104,89 @@ def inputs(self): Returns -------- - inputs : InputsAccumulateMinOverLabelFc + inputs : InputsAccumulateMinOverLabelFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsAccumulateMinOverLabelFc + outputs : OutputsAccumulateMinOverLabelFc """ return super().outputs -#internal name: accumulate_min_over_label_fc -#scripting name: accumulate_min_over_label_fc class InputsAccumulateMinOverLabelFc(_Inputs): - """Intermediate class used to connect user inputs to accumulate_min_over_label_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.accumulate_min_over_label_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + accumulate_min_over_label_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.accumulate_min_over_label_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(accumulate_min_over_label_fc._spec().inputs, op) - self._fields_container = Input(accumulate_min_over_label_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + accumulate_min_over_label_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.accumulate_min_over_label_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsAccumulateMinOverLabelFc(_Outputs): - """Intermediate class used to get outputs from accumulate_min_over_label_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.accumulate_min_over_label_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + accumulate_min_over_label_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.accumulate_min_over_label_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(accumulate_min_over_label_fc._spec().outputs, op) - self._field = Output(accumulate_min_over_label_fc._spec().output_pin(0), 0, op) + self._field = Output(accumulate_min_over_label_fc._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.accumulate_min_over_label_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/accumulate_over_label_fc.py b/ansys/dpf/core/operators/math/accumulate_over_label_fc.py index 7d3bf81762d..54eda5d396f 100644 --- a/ansys/dpf/core/operators/math/accumulate_over_label_fc.py +++ b/ansys/dpf/core/operators/math/accumulate_over_label_fc.py @@ -1,60 +1,97 @@ """ accumulate_over_label_fc -======================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class accumulate_over_label_fc(Operator): - """Compute the component-wise sum over all the fields having the same id for the label set in input in the fields container. This computation can be incremental, if the input fields container is connected and the operator is ran several time, the output field will be on all the inputs connected + """Compute the component-wise sum over all the fields having the same id + for the label set in input in the fields container. 
This + computation can be incremental, if the input fields container is + connected and the operator is ran several time, the output field + will be on all the inputs connected + + Parameters + ---------- + fields_container : FieldsContainer - available inputs: - - fields_container (FieldsContainer) - available outputs: - - field (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.accumulate_over_label_fc() - >>> # Instantiate operator - >>> op = dpf.operators.math.accumulate_over_label_fc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.accumulate_over_label_fc( + ... fields_container=my_fields_container, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.accumulate_over_label_fc(fields_container=my_fields_container) + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="accumulate_over_label_fc", config = config, server = server) + super().__init__(name="accumulate_over_label_fc", config=config, server=server) self._inputs = InputsAccumulateOverLabelFc(self) self._outputs = OutputsAccumulateOverLabelFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Compute the component-wise sum over all the fields having the same id for the label set in input in the fields container. This computation can be incremental, if the input fields container is connected and the operator is ran several time, the output field will be on all the inputs connected""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Compute the component-wise sum over all the fields having the same id + for the label set in input in the fields container. 
This + computation can be incremental, if the input fields + container is connected and the operator is ran several + time, the output field will be on all the inputs connected""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "accumulate_over_label_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="accumulate_over_label_fc", server=server) @property def inputs(self): @@ -62,91 +99,89 @@ def inputs(self): Returns -------- - inputs : InputsAccumulateOverLabelFc + inputs : InputsAccumulateOverLabelFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsAccumulateOverLabelFc + outputs : OutputsAccumulateOverLabelFc """ return super().outputs -#internal name: accumulate_over_label_fc -#scripting name: accumulate_over_label_fc class InputsAccumulateOverLabelFc(_Inputs): - """Intermediate class used to connect user inputs to accumulate_over_label_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.accumulate_over_label_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + accumulate_over_label_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.accumulate_over_label_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(accumulate_over_label_fc._spec().inputs, op) - self._fields_container = Input(accumulate_over_label_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + accumulate_over_label_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.accumulate_over_label_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsAccumulateOverLabelFc(_Outputs): - """Intermediate class used to get outputs from accumulate_over_label_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.accumulate_over_label_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + accumulate_over_label_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.accumulate_over_label_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(accumulate_over_label_fc._spec().outputs, op) - self._field = Output(accumulate_over_label_fc._spec().output_pin(0), 0, op) + self._field = Output(accumulate_over_label_fc._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.accumulate_over_label_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/add.py b/ansys/dpf/core/operators/math/add.py index 1cc3d70c0db..fa826c4ea9d 100644 --- a/ansys/dpf/core/operators/math/add.py +++ b/ansys/dpf/core/operators/math/add.py @@ -1,66 +1,126 @@ """ add -=== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class add(Operator): - """Computes the sum of two fields. If one field's scoping has 'overall' location, then these field's values are applied on the entire other field. if one of the input field is empty, the remaining is forwarded to the output. - - available inputs: - - fieldA (Field, FieldsContainer) - - fieldB (Field, FieldsContainer) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.add() - - >>> # Make input connections - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.add(fieldA=my_fieldA,fieldB=my_fieldB) + """Computes the sum of two fields. If one field's scoping has 'overall' + location, then these field's values are applied on the entire + other field. If one of the input field is empty, the remaining is + forwarded to the output. 
When using a constant or 'work_by_index', + it's possible to use 'inplace' to reuse one of the fields. + + Parameters + ---------- + fieldA : Field or FieldsContainer or float + Field or fields container with only one field + is expected + fieldB : Field or FieldsContainer or float + Field or fields container with only one field + is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.add() + + >>> # Make input connections + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.add( + ... fieldA=my_fieldA, + ... fieldB=my_fieldB, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, fieldA=None, fieldB=None, config=None, server=None): - super().__init__(name="add", config = config, server = server) + super().__init__(name="add", config=config, server=server) self._inputs = InputsAdd(self) self._outputs = OutputsAdd(self) - if fieldA !=None: + if fieldA is not None: self.inputs.fieldA.connect(fieldA) - if fieldB !=None: + if fieldB is not None: self.inputs.fieldB.connect(fieldB) @staticmethod def _spec(): - spec = Specification(description="""Computes the sum of two fields. If one field's scoping has 'overall' location, then these field's values are applied on the entire other field. 
if one of the input field is empty, the remaining is forwarded to the output.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fieldA", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "fieldB", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Computes the sum of two fields. If one field's scoping has 'overall' + location, then these field's values are applied on the + entire other field. If one of the input field is empty, + the remaining is forwarded to the output. When using a + constant or 'work_by_index', it's possible to use + 'inplace' to reuse one of the fields.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fieldA", + type_names=[ + "field", + "fields_container", + "double", + "vector", + ], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="fieldB", + type_names=[ + "field", + "fields_container", + "double", + "vector", + ], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "add") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="add", server=server) @property def inputs(self): @@ -68,119 +128,115 @@ def inputs(self): Returns -------- - inputs : InputsAdd + inputs : InputsAdd """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsAdd + outputs : OutputsAdd """ return super().outputs -#internal name: add -#scripting name: add class InputsAdd(_Inputs): - """Intermediate class used to connect user inputs to add operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.add() - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) + """Intermediate class used to connect user inputs to + add operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.add() + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) """ + def __init__(self, op: Operator): super().__init__(add._spec().inputs, op) - self._fieldA = Input(add._spec().input_pin(0), 0, op, -1) + self._fieldA = Input(add._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fieldA) - self._fieldB = Input(add._spec().input_pin(1), 1, op, -1) + self._fieldB = Input(add._spec().input_pin(1), 1, op, -1) self._inputs.append(self._fieldB) @property def fieldA(self): - """Allows to connect fieldA input to the operator + """Allows to connect fieldA input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldA : Field, FieldsContainer, + my_fieldA : Field or FieldsContainer or float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.add() >>> op.inputs.fieldA.connect(my_fieldA) - >>> #or + >>> # or >>> op.inputs.fieldA(my_fieldA) - """ return self._fieldA @property def fieldB(self): - """Allows to connect fieldB input to the operator + """Allows to connect fieldB input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldB : Field, FieldsContainer, + my_fieldB : Field or FieldsContainer or float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.add() >>> op.inputs.fieldB.connect(my_fieldB) - >>> #or + >>> # or >>> op.inputs.fieldB(my_fieldB) - """ return self._fieldB + class OutputsAdd(_Outputs): - """Intermediate class used to get outputs from add operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.add() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + add operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.add() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(add._spec().outputs, op) - self._field = Output(add._spec().output_pin(0), 0, op) + self._field = Output(add._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.add() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/add_constant.py b/ansys/dpf/core/operators/math/add_constant.py index 3193aa88b0e..17a5a61f698 100644 --- a/ansys/dpf/core/operators/math/add_constant.py +++ b/ansys/dpf/core/operators/math/add_constant.py @@ -1,66 +1,105 @@ """ add_constant -============ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class add_constant(Operator): """Computes the sum of a field (in 0) and a scalar (in 1). 
- available inputs: - - field (Field, FieldsContainer) - - ponderation (float, list) + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + ponderation : float + Double or vector of double + - available outputs: - - field (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.add_constant() - >>> # Instantiate operator - >>> op = dpf.operators.math.add_constant() + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_ponderation = float() + >>> op.inputs.ponderation.connect(my_ponderation) - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_ponderation = float() - >>> op.inputs.ponderation.connect(my_ponderation) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.add_constant( + ... field=my_field, + ... ponderation=my_ponderation, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.add_constant(field=my_field,ponderation=my_ponderation) + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, field=None, ponderation=None, config=None, server=None): - super().__init__(name="add_constant", config = config, server = server) + super().__init__(name="add_constant", config=config, server=server) self._inputs = InputsAddConstant(self) self._outputs = OutputsAddConstant(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if ponderation !=None: + if ponderation is not None: self.inputs.ponderation.connect(ponderation) @staticmethod def _spec(): - spec = Specification(description="""Computes the sum of a field (in 0) and a scalar (in 1).""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "ponderation", type_names=["double","vector"], optional=False, document="""double or vector of double""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Computes the sum of a field (in 0) and a scalar (in 1).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="ponderation", + type_names=["double", "vector"], + optional=False, + document="""Double or vector of double""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def 
default_config(): - return Operator.default_config(name = "add_constant") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="add_constant", server=server) @property def inputs(self): @@ -68,119 +107,114 @@ def inputs(self): Returns -------- - inputs : InputsAddConstant + inputs : InputsAddConstant """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsAddConstant + outputs : OutputsAddConstant """ return super().outputs -#internal name: add_constant -#scripting name: add_constant class InputsAddConstant(_Inputs): - """Intermediate class used to connect user inputs to add_constant operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.add_constant() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_ponderation = float() - >>> op.inputs.ponderation.connect(my_ponderation) + """Intermediate class used to connect user inputs to + add_constant operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.add_constant() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_ponderation = float() + >>> op.inputs.ponderation.connect(my_ponderation) """ + def __init__(self, op: Operator): super().__init__(add_constant._spec().inputs, op) - self._field = Input(add_constant._spec().input_pin(0), 0, op, -1) + self._field = Input(add_constant._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._ponderation = Input(add_constant._spec().input_pin(1), 1, op, -1) + self._ponderation = Input(add_constant._spec().input_pin(1), 1, op, -1) self._inputs.append(self._ponderation) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.add_constant() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def ponderation(self): - """Allows to connect ponderation input to the operator + """Allows to connect ponderation input to the operator. 
- - pindoc: double or vector of double + Double or vector of double Parameters ---------- - my_ponderation : float, list, + my_ponderation : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.add_constant() >>> op.inputs.ponderation.connect(my_ponderation) - >>> #or + >>> # or >>> op.inputs.ponderation(my_ponderation) - """ return self._ponderation + class OutputsAddConstant(_Outputs): - """Intermediate class used to get outputs from add_constant operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.add_constant() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + add_constant operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.add_constant() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(add_constant._spec().outputs, op) - self._field = Output(add_constant._spec().output_pin(0), 0, op) + self._field = Output(add_constant._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.add_constant() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/add_constant_fc.py b/ansys/dpf/core/operators/math/add_constant_fc.py index 9e1b12ae257..b322af76fdd 100644 --- a/ansys/dpf/core/operators/math/add_constant_fc.py +++ b/ansys/dpf/core/operators/math/add_constant_fc.py @@ -1,66 +1,107 @@ """ add_constant_fc =============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class add_constant_fc(Operator): """Computes the sum of a field (in 0) and a scalar (in 1). - available inputs: - - fields_container (FieldsContainer) - - ponderation (float, list) + Parameters + ---------- + fields_container : FieldsContainer + Field or fields container with only one field + is expected + ponderation : float + Double or vector of double + - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.add_constant_fc() - >>> # Instantiate operator - >>> op = dpf.operators.math.add_constant_fc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_ponderation = float() + >>> op.inputs.ponderation.connect(my_ponderation) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_ponderation = float() - >>> op.inputs.ponderation.connect(my_ponderation) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.add_constant_fc( + ... fields_container=my_fields_container, + ... ponderation=my_ponderation, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.add_constant_fc(fields_container=my_fields_container,ponderation=my_ponderation) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, ponderation=None, config=None, server=None): - super().__init__(name="add_constant_fc", config = config, server = server) + def __init__( + self, fields_container=None, ponderation=None, config=None, server=None + ): + super().__init__(name="add_constant_fc", config=config, server=server) self._inputs = InputsAddConstantFc(self) self._outputs = OutputsAddConstantFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if ponderation !=None: + if ponderation is not None: self.inputs.ponderation.connect(ponderation) @staticmethod def _spec(): - spec = Specification(description="""Computes the sum of a field (in 0) and a scalar (in 1).""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "ponderation", type_names=["double","vector"], optional=False, document="""double or vector of double""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Computes the sum of a field (in 0) and a scalar (in 1).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="ponderation", + 
type_names=["double", "vector"], + optional=False, + document="""Double or vector of double""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "add_constant_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="add_constant_fc", server=server) @property def inputs(self): @@ -68,119 +109,114 @@ def inputs(self): Returns -------- - inputs : InputsAddConstantFc + inputs : InputsAddConstantFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsAddConstantFc + outputs : OutputsAddConstantFc """ return super().outputs -#internal name: add_constant_fc -#scripting name: add_constant_fc class InputsAddConstantFc(_Inputs): - """Intermediate class used to connect user inputs to add_constant_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.add_constant_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_ponderation = float() - >>> op.inputs.ponderation.connect(my_ponderation) + """Intermediate class used to connect user inputs to + add_constant_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.add_constant_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_ponderation = float() + >>> op.inputs.ponderation.connect(my_ponderation) """ + def __init__(self, op: Operator): super().__init__(add_constant_fc._spec().inputs, op) - self._fields_container = Input(add_constant_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(add_constant_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) - self._ponderation = Input(add_constant_fc._spec().input_pin(1), 1, op, -1) + self._ponderation = Input(add_constant_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._ponderation) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.add_constant_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def ponderation(self): - """Allows to connect ponderation input to the operator + """Allows to connect ponderation input to the operator. 
- - pindoc: double or vector of double + Double or vector of double Parameters ---------- - my_ponderation : float, list, + my_ponderation : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.add_constant_fc() >>> op.inputs.ponderation.connect(my_ponderation) - >>> #or + >>> # or >>> op.inputs.ponderation(my_ponderation) - """ return self._ponderation + class OutputsAddConstantFc(_Outputs): - """Intermediate class used to get outputs from add_constant_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.add_constant_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + add_constant_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.add_constant_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(add_constant_fc._spec().outputs, op) - self._fields_container = Output(add_constant_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(add_constant_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.add_constant_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/add_fc.py b/ansys/dpf/core/operators/math/add_fc.py index 10514b7cb1e..2d146ad7b73 100644 --- a/ansys/dpf/core/operators/math/add_fc.py +++ b/ansys/dpf/core/operators/math/add_fc.py @@ -1,66 +1,118 @@ """ add_fc -====== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class add_fc(Operator): - """Select all fields having the same label space in the input fields container, and add those together. If fields, doubles, or vectors of doubles are put in input, they are added to all the fields. + """Select all fields having the same label space in the input fields + container, and add those together. If fields, doubles, or vectors + of doubles are put in input, they are added to all the fields. 
+ + Parameters + ---------- + fields_container1 : FieldsContainer or Field or float + fields_container2 : FieldsContainer or Field or float - available inputs: - - fields_container1 (FieldsContainer, Field, float, list) - - fields_container2 (FieldsContainer, Field, float, list) - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.add_fc() - >>> # Instantiate operator - >>> op = dpf.operators.math.add_fc() + >>> # Make input connections + >>> my_fields_container1 = dpf.FieldsContainer() + >>> op.inputs.fields_container1.connect(my_fields_container1) + >>> my_fields_container2 = dpf.FieldsContainer() + >>> op.inputs.fields_container2.connect(my_fields_container2) - >>> # Make input connections - >>> my_fields_container1 = dpf.FieldsContainer() - >>> op.inputs.fields_container1.connect(my_fields_container1) - >>> my_fields_container2 = dpf.FieldsContainer() - >>> op.inputs.fields_container2.connect(my_fields_container2) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.add_fc( + ... fields_container1=my_fields_container1, + ... fields_container2=my_fields_container2, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.add_fc(fields_container1=my_fields_container1,fields_container2=my_fields_container2) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container1=None, fields_container2=None, config=None, server=None): - super().__init__(name="add_fc", config = config, server = server) + def __init__( + self, fields_container1=None, fields_container2=None, config=None, server=None + ): + super().__init__(name="add_fc", config=config, server=server) self._inputs = InputsAddFc(self) self._outputs = OutputsAddFc(self) - if fields_container1 !=None: + if fields_container1 is not None: self.inputs.fields_container1.connect(fields_container1) - if fields_container2 !=None: + if fields_container2 is not None: self.inputs.fields_container2.connect(fields_container2) @staticmethod def _spec(): - spec = Specification(description="""Select all fields having the same label space in the input fields container, and add those together. If fields, doubles, or vectors of doubles are put in input, they are added to all the fields.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container","field","double","vector"], optional=False, document=""""""), - 1 : PinSpecification(name = "fields_container", type_names=["fields_container","field","double","vector"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Select all fields having the same label space in the input fields + container, and add those together. 
If fields, doubles, or + vectors of doubles are put in input, they are added to all + the fields.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=[ + "fields_container", + "field", + "double", + "vector", + ], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="fields_container", + type_names=[ + "fields_container", + "field", + "double", + "vector", + ], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "add_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="add_fc", server=server) @property def inputs(self): @@ -68,115 +120,109 @@ def inputs(self): Returns -------- - inputs : InputsAddFc + inputs : InputsAddFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsAddFc + outputs : OutputsAddFc """ return super().outputs -#internal name: add_fc -#scripting name: add_fc class InputsAddFc(_Inputs): - """Intermediate class used to connect user inputs to add_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.add_fc() - >>> my_fields_container1 = dpf.FieldsContainer() - >>> op.inputs.fields_container1.connect(my_fields_container1) - >>> my_fields_container2 = dpf.FieldsContainer() - >>> op.inputs.fields_container2.connect(my_fields_container2) + """Intermediate class used to connect user inputs to + add_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.add_fc() + >>> my_fields_container1 = dpf.FieldsContainer() + >>> op.inputs.fields_container1.connect(my_fields_container1) + >>> my_fields_container2 = dpf.FieldsContainer() + >>> op.inputs.fields_container2.connect(my_fields_container2) """ + def __init__(self, op: Operator): super().__init__(add_fc._spec().inputs, op) - self._fields_container1 = Input(add_fc._spec().input_pin(0), 0, op, 0) + self._fields_container1 = Input(add_fc._spec().input_pin(0), 0, op, 0) self._inputs.append(self._fields_container1) - self._fields_container2 = Input(add_fc._spec().input_pin(1), 1, op, 1) + self._fields_container2 = Input(add_fc._spec().input_pin(1), 1, op, 1) self._inputs.append(self._fields_container2) @property def fields_container1(self): - """Allows to connect fields_container1 input to the operator + """Allows to connect fields_container1 input to the operator. 
Parameters ---------- - my_fields_container1 : FieldsContainer, Field, float, list, + my_fields_container1 : FieldsContainer or Field or float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.add_fc() >>> op.inputs.fields_container1.connect(my_fields_container1) - >>> #or + >>> # or >>> op.inputs.fields_container1(my_fields_container1) - """ return self._fields_container1 @property def fields_container2(self): - """Allows to connect fields_container2 input to the operator + """Allows to connect fields_container2 input to the operator. Parameters ---------- - my_fields_container2 : FieldsContainer, Field, float, list, + my_fields_container2 : FieldsContainer or Field or float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.add_fc() >>> op.inputs.fields_container2.connect(my_fields_container2) - >>> #or + >>> # or >>> op.inputs.fields_container2(my_fields_container2) - """ return self._fields_container2 + class OutputsAddFc(_Outputs): - """Intermediate class used to get outputs from add_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.add_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + add_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.add_fc() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(add_fc._spec().outputs, op) - self._fields_container = Output(add_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(add_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.add_fc() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/amplitude.py b/ansys/dpf/core/operators/math/amplitude.py index 90fafae394f..2b241be9ae4 100644 --- a/ansys/dpf/core/operators/math/amplitude.py +++ b/ansys/dpf/core/operators/math/amplitude.py @@ -1,66 +1,107 @@ """ amplitude -========= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class amplitude(Operator): """Computes amplitude of a real and an imaginary field. 
- available inputs: - - fieldA (Field, FieldsContainer) - - fieldB (Field, FieldsContainer) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.amplitude() - - >>> # Make input connections - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.amplitude(fieldA=my_fieldA,fieldB=my_fieldB) + Parameters + ---------- + fieldA : Field or FieldsContainer + Field or fields container with only one field + is expected + fieldB : Field or FieldsContainer + Field or fields container with only one field + is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.amplitude() + + >>> # Make input connections + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.amplitude( + ... fieldA=my_fieldA, + ... fieldB=my_fieldB, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, fieldA=None, fieldB=None, config=None, server=None): - super().__init__(name="amplitude", config = config, server = server) + super().__init__(name="amplitude", config=config, server=server) self._inputs = InputsAmplitude(self) self._outputs = OutputsAmplitude(self) - if fieldA !=None: + if fieldA is not None: self.inputs.fieldA.connect(fieldA) - if fieldB !=None: + if fieldB is not None: self.inputs.fieldB.connect(fieldB) @staticmethod def _spec(): - spec = Specification(description="""Computes amplitude of a real and an imaginary field.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fieldA", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "fieldB", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Computes amplitude of a real and an imaginary field.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fieldA", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="fieldB", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "amplitude") + def default_config(server=None): 
+ """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="amplitude", server=server) @property def inputs(self): @@ -68,119 +109,115 @@ def inputs(self): Returns -------- - inputs : InputsAmplitude + inputs : InputsAmplitude """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsAmplitude + outputs : OutputsAmplitude """ return super().outputs -#internal name: amplitude -#scripting name: amplitude class InputsAmplitude(_Inputs): - """Intermediate class used to connect user inputs to amplitude operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.amplitude() - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) + """Intermediate class used to connect user inputs to + amplitude operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.amplitude() + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) """ + def __init__(self, op: Operator): super().__init__(amplitude._spec().inputs, op) - self._fieldA = Input(amplitude._spec().input_pin(0), 0, op, -1) + self._fieldA = Input(amplitude._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fieldA) - self._fieldB = Input(amplitude._spec().input_pin(1), 1, op, -1) + self._fieldB = Input(amplitude._spec().input_pin(1), 1, op, -1) self._inputs.append(self._fieldB) @property def fieldA(self): - """Allows to connect fieldA input to the operator + """Allows to connect fieldA input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldA : Field, FieldsContainer, + my_fieldA : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.amplitude() >>> op.inputs.fieldA.connect(my_fieldA) - >>> #or + >>> # or >>> op.inputs.fieldA(my_fieldA) - """ return self._fieldA @property def fieldB(self): - """Allows to connect fieldB input to the operator + """Allows to connect fieldB input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldB : Field, FieldsContainer, + my_fieldB : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.amplitude() >>> op.inputs.fieldB.connect(my_fieldB) - >>> #or + >>> # or >>> op.inputs.fieldB(my_fieldB) - """ return self._fieldB + class OutputsAmplitude(_Outputs): - """Intermediate class used to get outputs from amplitude operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.amplitude() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + amplitude operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.amplitude() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(amplitude._spec().outputs, op) - self._field = Output(amplitude._spec().output_pin(0), 0, op) + self._field = Output(amplitude._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.amplitude() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/amplitude_fc.py b/ansys/dpf/core/operators/math/amplitude_fc.py index f6c358574c7..12dcac79471 100644 --- a/ansys/dpf/core/operators/math/amplitude_fc.py +++ b/ansys/dpf/core/operators/math/amplitude_fc.py @@ -1,60 +1,89 @@ """ amplitude_fc -============ +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class amplitude_fc(Operator): """Computes amplitude of a real and an imaginary fields. - available inputs: - - fields_container (FieldsContainer) + Parameters + ---------- + fields_container : FieldsContainer + - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.amplitude_fc() - >>> # Instantiate operator - >>> op = dpf.operators.math.amplitude_fc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.amplitude_fc( + ... fields_container=my_fields_container, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.amplitude_fc(fields_container=my_fields_container) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="amplitude_fc", config = config, server = server) + super().__init__(name="amplitude_fc", config=config, server=server) self._inputs = InputsAmplitudeFc(self) self._outputs = OutputsAmplitudeFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Computes amplitude of a real and an imaginary fields.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Computes amplitude of a real and an imaginary fields.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "amplitude_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="amplitude_fc", server=server) @property def inputs(self): @@ -62,91 +91,87 @@ def inputs(self): Returns -------- - inputs : InputsAmplitudeFc + inputs : InputsAmplitudeFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsAmplitudeFc + outputs : OutputsAmplitudeFc """ return super().outputs -#internal name: amplitude_fc -#scripting name: amplitude_fc class InputsAmplitudeFc(_Inputs): - """Intermediate class used to connect user inputs to amplitude_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.amplitude_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + amplitude_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.amplitude_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(amplitude_fc._spec().inputs, op) - self._fields_container = Input(amplitude_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(amplitude_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.amplitude_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsAmplitudeFc(_Outputs): - """Intermediate class used to get outputs from amplitude_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.amplitude_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + amplitude_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.amplitude_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(amplitude_fc._spec().outputs, op) - self._fields_container = Output(amplitude_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(amplitude_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.amplitude_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/average_over_label_fc.py b/ansys/dpf/core/operators/math/average_over_label_fc.py index 0cfb73c9bd8..60f683564b1 100644 --- a/ansys/dpf/core/operators/math/average_over_label_fc.py +++ b/ansys/dpf/core/operators/math/average_over_label_fc.py @@ -1,60 +1,97 @@ """ average_over_label_fc -===================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class average_over_label_fc(Operator): - """Compute the component-wise average over all the fields having the same id for the label set in input in the fields container. This computation can be incremental, if the input fields container is connected and the operator is ran several time, the output field will be on all the inputs connected + """Compute the component-wise average over all the fields having the same + id for the label set in input in the fields container. 
This + computation can be incremental, if the input fields container is + connected and the operator is ran several time, the output field + will be on all the inputs connected + + Parameters + ---------- + fields_container : FieldsContainer - available inputs: - - fields_container (FieldsContainer) - available outputs: - - field (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.average_over_label_fc() - >>> # Instantiate operator - >>> op = dpf.operators.math.average_over_label_fc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.average_over_label_fc( + ... fields_container=my_fields_container, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.average_over_label_fc(fields_container=my_fields_container) + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="average_over_label_fc", config = config, server = server) + super().__init__(name="average_over_label_fc", config=config, server=server) self._inputs = InputsAverageOverLabelFc(self) self._outputs = OutputsAverageOverLabelFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Compute the component-wise average over all the fields having the same id for the label set in input in the fields container. This computation can be incremental, if the input fields container is connected and the operator is ran several time, the output field will be on all the inputs connected""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Compute the component-wise average over all the fields having the same + id for the label set in input in the fields container. 
+ This computation can be incremental, if the input fields + container is connected and the operator is ran several + time, the output field will be on all the inputs connected""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "average_over_label_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="average_over_label_fc", server=server) @property def inputs(self): @@ -62,91 +99,89 @@ def inputs(self): Returns -------- - inputs : InputsAverageOverLabelFc + inputs : InputsAverageOverLabelFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsAverageOverLabelFc + outputs : OutputsAverageOverLabelFc """ return super().outputs -#internal name: average_over_label_fc -#scripting name: average_over_label_fc class InputsAverageOverLabelFc(_Inputs): - """Intermediate class used to connect user inputs to average_over_label_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.average_over_label_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + average_over_label_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.average_over_label_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(average_over_label_fc._spec().inputs, op) - self._fields_container = Input(average_over_label_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + average_over_label_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.average_over_label_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsAverageOverLabelFc(_Outputs): - """Intermediate class used to get outputs from average_over_label_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.average_over_label_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + average_over_label_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.average_over_label_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(average_over_label_fc._spec().outputs, op) - self._field = Output(average_over_label_fc._spec().output_pin(0), 0, op) + self._field = Output(average_over_label_fc._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.average_over_label_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/centroid.py b/ansys/dpf/core/operators/math/centroid.py index cf3b03c6597..fc8339c5c95 100644 --- a/ansys/dpf/core/operators/math/centroid.py +++ b/ansys/dpf/core/operators/math/centroid.py @@ -1,72 +1,122 @@ """ centroid -======== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class centroid(Operator): - """Computes centroid of field1 and field2, using fieldOut = field1*(1.-fact)+field2*(fact). - - available inputs: - - fieldA (Field, FieldsContainer) - - fieldB (Field, FieldsContainer) - - factor (float) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.centroid() - - >>> # Make input connections - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) - >>> my_factor = float() - >>> op.inputs.factor.connect(my_factor) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.centroid(fieldA=my_fieldA,fieldB=my_fieldB,factor=my_factor) + """Computes centroid of field1 and field2, using fieldOut = + field1*(1.-fact)+field2*(fact). 
+ + Parameters + ---------- + fieldA : Field or FieldsContainer + Field or fields container with only one field + is expected + fieldB : Field or FieldsContainer + Field or fields container with only one field + is expected + factor : float + Scalar + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.centroid() + + >>> # Make input connections + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) + >>> my_factor = float() + >>> op.inputs.factor.connect(my_factor) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.centroid( + ... fieldA=my_fieldA, + ... fieldB=my_fieldB, + ... factor=my_factor, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, fieldA=None, fieldB=None, factor=None, config=None, server=None): - super().__init__(name="centroid", config = config, server = server) + super().__init__(name="centroid", config=config, server=server) self._inputs = InputsCentroid(self) self._outputs = OutputsCentroid(self) - if fieldA !=None: + if fieldA is not None: self.inputs.fieldA.connect(fieldA) - if fieldB !=None: + if fieldB is not None: self.inputs.fieldB.connect(fieldB) - if factor !=None: + if factor is not None: self.inputs.factor.connect(factor) @staticmethod def _spec(): - spec = Specification(description="""Computes centroid of field1 and field2, using fieldOut = field1*(1.-fact)+field2*(fact).""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fieldA", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "fieldB", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is 
expected"""), - 2 : PinSpecification(name = "factor", type_names=["double"], optional=False, document="""Scalar""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Computes centroid of field1 and field2, using fieldOut = + field1*(1.-fact)+field2*(fact).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fieldA", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="fieldB", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 2: PinSpecification( + name="factor", + type_names=["double"], + optional=False, + document="""Scalar""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "centroid") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="centroid", server=server) @property def inputs(self): @@ -74,145 +124,139 @@ def inputs(self): Returns -------- - inputs : InputsCentroid + inputs : InputsCentroid """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCentroid + outputs : OutputsCentroid """ return super().outputs -#internal name: centroid -#scripting name: centroid class InputsCentroid(_Inputs): - """Intermediate class used to connect user inputs to centroid operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.centroid() - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) - >>> my_factor = float() - >>> op.inputs.factor.connect(my_factor) + """Intermediate class used to connect user inputs to + centroid operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.centroid() + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) + >>> my_factor = float() + >>> op.inputs.factor.connect(my_factor) """ + def __init__(self, op: Operator): super().__init__(centroid._spec().inputs, op) - self._fieldA = Input(centroid._spec().input_pin(0), 0, op, -1) + self._fieldA = Input(centroid._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fieldA) - self._fieldB = Input(centroid._spec().input_pin(1), 1, op, -1) + self._fieldB = Input(centroid._spec().input_pin(1), 1, op, -1) self._inputs.append(self._fieldB) - self._factor = Input(centroid._spec().input_pin(2), 2, op, -1) + self._factor = Input(centroid._spec().input_pin(2), 2, op, -1) self._inputs.append(self._factor) @property def fieldA(self): - """Allows to connect fieldA input to the operator + """Allows to connect fieldA input to the 
operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldA : Field, FieldsContainer, + my_fieldA : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.centroid() >>> op.inputs.fieldA.connect(my_fieldA) - >>> #or + >>> # or >>> op.inputs.fieldA(my_fieldA) - """ return self._fieldA @property def fieldB(self): - """Allows to connect fieldB input to the operator + """Allows to connect fieldB input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldB : Field, FieldsContainer, + my_fieldB : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.centroid() >>> op.inputs.fieldB.connect(my_fieldB) - >>> #or + >>> # or >>> op.inputs.fieldB(my_fieldB) - """ return self._fieldB @property def factor(self): - """Allows to connect factor input to the operator + """Allows to connect factor input to the operator. - - pindoc: Scalar + Scalar Parameters ---------- - my_factor : float, + my_factor : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.centroid() >>> op.inputs.factor.connect(my_factor) - >>> #or + >>> # or >>> op.inputs.factor(my_factor) - """ return self._factor + class OutputsCentroid(_Outputs): - """Intermediate class used to get outputs from centroid operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.centroid() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + centroid operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.centroid() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(centroid._spec().outputs, op) - self._field = Output(centroid._spec().output_pin(0), 0, op) + self._field = Output(centroid._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.centroid() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/centroid_fc.py b/ansys/dpf/core/operators/math/centroid_fc.py index 949b4b3afa9..08f8bc9d859 100644 --- a/ansys/dpf/core/operators/math/centroid_fc.py +++ b/ansys/dpf/core/operators/math/centroid_fc.py @@ -1,72 +1,119 @@ """ centroid_fc -=========== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class centroid_fc(Operator): - """Computes the centroid of all the matching fields of a fields container at a given time or frequency, using fieldOut = field1*(1.-fact)+field2*(fact). 
- - available inputs: - - fields_container (FieldsContainer) - - time_freq (float) - - step (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.centroid_fc() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_time_freq = float() - >>> op.inputs.time_freq.connect(my_time_freq) - >>> my_step = int() - >>> op.inputs.step.connect(my_step) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.centroid_fc(fields_container=my_fields_container,time_freq=my_time_freq,step=my_step) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, time_freq=None, step=None, config=None, server=None): - super().__init__(name="centroid_fc", config = config, server = server) + """Computes the centroid of all the matching fields of a fields container + at a given time or frequency, using fieldOut = + field1*(1.-fact)+field2*(fact). + + Parameters + ---------- + fields_container : FieldsContainer + time_freq : float + step : int, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.centroid_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_time_freq = float() + >>> op.inputs.time_freq.connect(my_time_freq) + >>> my_step = int() + >>> op.inputs.step.connect(my_step) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.centroid_fc( + ... fields_container=my_fields_container, + ... time_freq=my_time_freq, + ... step=my_step, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, fields_container=None, time_freq=None, step=None, config=None, server=None + ): + super().__init__(name="centroid_fc", config=config, server=server) self._inputs = InputsCentroidFc(self) self._outputs = OutputsCentroidFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if time_freq !=None: + if time_freq is not None: self.inputs.time_freq.connect(time_freq) - if step !=None: + if step is not None: self.inputs.step.connect(step) @staticmethod def _spec(): - spec = Specification(description="""Computes the centroid of all the matching fields of a fields container at a given time or frequency, using fieldOut = field1*(1.-fact)+field2*(fact).""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "time_freq", type_names=["double"], optional=False, document=""""""), - 2 : PinSpecification(name = "step", type_names=["int32"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Computes the centroid of all the matching fields of a fields container + at a given time or frequency, using fieldOut = + field1*(1.-fact)+field2*(fact).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="time_freq", + type_names=["double"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="step", + type_names=["int32"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + 
type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "centroid_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="centroid_fc", server=server) @property def inputs(self): @@ -74,139 +121,131 @@ def inputs(self): Returns -------- - inputs : InputsCentroidFc + inputs : InputsCentroidFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCentroidFc + outputs : OutputsCentroidFc """ return super().outputs -#internal name: centroid_fc -#scripting name: centroid_fc class InputsCentroidFc(_Inputs): - """Intermediate class used to connect user inputs to centroid_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.centroid_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_time_freq = float() - >>> op.inputs.time_freq.connect(my_time_freq) - >>> my_step = int() - >>> op.inputs.step.connect(my_step) + """Intermediate class used to connect user inputs to + centroid_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.centroid_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_time_freq = float() + >>> op.inputs.time_freq.connect(my_time_freq) + >>> my_step = int() + >>> op.inputs.step.connect(my_step) """ + def __init__(self, op: Operator): super().__init__(centroid_fc._spec().inputs, op) - self._fields_container = Input(centroid_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(centroid_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) - self._time_freq = Input(centroid_fc._spec().input_pin(1), 1, op, -1) + self._time_freq = Input(centroid_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._time_freq) - self._step = Input(centroid_fc._spec().input_pin(2), 2, op, -1) + self._step = Input(centroid_fc._spec().input_pin(2), 2, op, -1) self._inputs.append(self._step) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.centroid_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def time_freq(self): - """Allows to connect time_freq input to the operator + """Allows to connect time_freq input to the operator. 
Parameters ---------- - my_time_freq : float, + my_time_freq : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.centroid_fc() >>> op.inputs.time_freq.connect(my_time_freq) - >>> #or + >>> # or >>> op.inputs.time_freq(my_time_freq) - """ return self._time_freq @property def step(self): - """Allows to connect step input to the operator + """Allows to connect step input to the operator. Parameters ---------- - my_step : int, + my_step : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.centroid_fc() >>> op.inputs.step.connect(my_step) - >>> #or + >>> # or >>> op.inputs.step(my_step) - """ return self._step + class OutputsCentroidFc(_Outputs): - """Intermediate class used to get outputs from centroid_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.centroid_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + centroid_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.centroid_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(centroid_fc._spec().outputs, op) - self._fields_container = Output(centroid_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(centroid_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.centroid_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/component_wise_divide.py b/ansys/dpf/core/operators/math/component_wise_divide.py index 2be9089c73f..1c5ab1b81ff 100644 --- a/ansys/dpf/core/operators/math/component_wise_divide.py +++ b/ansys/dpf/core/operators/math/component_wise_divide.py @@ -1,66 +1,116 @@ """ component_wise_divide -===================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class component_wise_divide(Operator): - """Computes component-wise fraction between two fields of same dimensionality. If one field's scoping has overall location, then these field's values are applied on the entire other field. - - available inputs: - - fieldA (Field, FieldsContainer) - - fieldB (Field, FieldsContainer) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.component_wise_divide() - - >>> # Make input connections - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.component_wise_divide(fieldA=my_fieldA,fieldB=my_fieldB) + """Computes component-wise fraction between two fields of same + dimensionality. 
If one field's scoping has overall location, then + these field's values are applied on the entire other field.When + using a constant or 'work_by_index', it's possible to use + 'inplace' to reuse one of the fields. + + Parameters + ---------- + fieldA : Field or FieldsContainer + Field or fields container with only one field + is expected + fieldB : Field or FieldsContainer + Field or fields container with only one field + is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.component_wise_divide() + + >>> # Make input connections + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.component_wise_divide( + ... fieldA=my_fieldA, + ... fieldB=my_fieldB, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, fieldA=None, fieldB=None, config=None, server=None): - super().__init__(name="component_wise_divide", config = config, server = server) + super().__init__(name="component_wise_divide", config=config, server=server) self._inputs = InputsComponentWiseDivide(self) self._outputs = OutputsComponentWiseDivide(self) - if fieldA !=None: + if fieldA is not None: self.inputs.fieldA.connect(fieldA) - if fieldB !=None: + if fieldB is not None: self.inputs.fieldB.connect(fieldB) @staticmethod def _spec(): - spec = Specification(description="""Computes component-wise fraction between two fields of same dimensionality. 
If one field's scoping has overall location, then these field's values are applied on the entire other field.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fieldA", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "fieldB", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Computes component-wise fraction between two fields of same + dimensionality. If one field's scoping has overall + location, then these field's values are applied on the + entire other field.When using a constant or + 'work_by_index', it's possible to use 'inplace' to reuse + one of the fields.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fieldA", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="fieldB", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "component_wise_divide") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="component_wise_divide", server=server) @property def inputs(self): @@ -68,119 +118,115 @@ def inputs(self): Returns -------- - inputs : InputsComponentWiseDivide + inputs : InputsComponentWiseDivide """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsComponentWiseDivide + outputs : OutputsComponentWiseDivide """ return super().outputs -#internal name: component_wise_divide -#scripting name: component_wise_divide class InputsComponentWiseDivide(_Inputs): - """Intermediate class used to connect user inputs to component_wise_divide operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.component_wise_divide() - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) + """Intermediate class used to connect user inputs to + component_wise_divide operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.component_wise_divide() + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) """ + def __init__(self, op: Operator): super().__init__(component_wise_divide._spec().inputs, op) - self._fieldA = Input(component_wise_divide._spec().input_pin(0), 0, op, -1) + self._fieldA = Input(component_wise_divide._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fieldA) - self._fieldB = Input(component_wise_divide._spec().input_pin(1), 1, op, -1) + self._fieldB = Input(component_wise_divide._spec().input_pin(1), 1, op, -1) self._inputs.append(self._fieldB) @property def fieldA(self): - """Allows to connect fieldA input to the operator + """Allows to connect fieldA input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldA : Field, FieldsContainer, + my_fieldA : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.component_wise_divide() >>> op.inputs.fieldA.connect(my_fieldA) - >>> #or + >>> # or >>> op.inputs.fieldA(my_fieldA) - """ return self._fieldA @property def fieldB(self): - """Allows to connect fieldB input to the operator + """Allows to connect fieldB input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldB : Field, FieldsContainer, + my_fieldB : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.component_wise_divide() >>> op.inputs.fieldB.connect(my_fieldB) - >>> #or + >>> # or >>> op.inputs.fieldB(my_fieldB) - """ return self._fieldB + class OutputsComponentWiseDivide(_Outputs): - """Intermediate class used to get outputs from component_wise_divide operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.component_wise_divide() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + component_wise_divide operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.component_wise_divide() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(component_wise_divide._spec().outputs, op) - self._field = Output(component_wise_divide._spec().output_pin(0), 0, op) + self._field = Output(component_wise_divide._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.component_wise_divide() >>> # Connect inputs : op.inputs. ... 
- >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/component_wise_divide_fc.py b/ansys/dpf/core/operators/math/component_wise_divide_fc.py index 6bd41a38da7..039dd3a96a2 100644 --- a/ansys/dpf/core/operators/math/component_wise_divide_fc.py +++ b/ansys/dpf/core/operators/math/component_wise_divide_fc.py @@ -1,66 +1,111 @@ """ component_wise_divide_fc -======================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class component_wise_divide_fc(Operator): - """For every two fields with the same label space (from the two input fields containers), computes component-wise fraction between two fields of same dimensionality. If one field's scoping has overall location, then these field's values are applied on the entire other field. 
- - available inputs: - - fields_containerA (FieldsContainer) - - fields_containerB (FieldsContainer) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.component_wise_divide_fc() - - >>> # Make input connections - >>> my_fields_containerA = dpf.FieldsContainer() - >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> my_fields_containerB = dpf.FieldsContainer() - >>> op.inputs.fields_containerB.connect(my_fields_containerB) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.component_wise_divide_fc(fields_containerA=my_fields_containerA,fields_containerB=my_fields_containerB) + """For every two fields with the same label space (from the two input + fields containers), computes component-wise fraction between two + fields of same dimensionality. If one field's scoping has overall + location, then these field's values are applied on the entire + other field. + + Parameters + ---------- + fields_containerA : FieldsContainer + fields_containerB : FieldsContainer + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.component_wise_divide_fc() + + >>> # Make input connections + >>> my_fields_containerA = dpf.FieldsContainer() + >>> op.inputs.fields_containerA.connect(my_fields_containerA) + >>> my_fields_containerB = dpf.FieldsContainer() + >>> op.inputs.fields_containerB.connect(my_fields_containerB) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.component_wise_divide_fc( + ... fields_containerA=my_fields_containerA, + ... fields_containerB=my_fields_containerB, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_containerA=None, fields_containerB=None, config=None, server=None): - super().__init__(name="component_wise_divide_fc", config = config, server = server) + def __init__( + self, fields_containerA=None, fields_containerB=None, config=None, server=None + ): + super().__init__(name="component_wise_divide_fc", config=config, server=server) self._inputs = InputsComponentWiseDivideFc(self) self._outputs = OutputsComponentWiseDivideFc(self) - if fields_containerA !=None: + if fields_containerA is not None: self.inputs.fields_containerA.connect(fields_containerA) - if fields_containerB !=None: + if fields_containerB is not None: self.inputs.fields_containerB.connect(fields_containerB) @staticmethod def _spec(): - spec = Specification(description="""For every two fields with the same label space (from the two input fields containers), computes component-wise fraction between two fields of same dimensionality. If one field's scoping has overall location, then these field's values are applied on the entire other field.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_containerA", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "fields_containerB", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """For every two fields with the same label space (from the two input + fields containers), computes component-wise fraction + between two fields of same dimensionality. 
If one field's + scoping has overall location, then these field's values + are applied on the entire other field.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_containerA", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="fields_containerB", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "component_wise_divide_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="component_wise_divide_fc", server=server) @property def inputs(self): @@ -68,115 +113,115 @@ def inputs(self): Returns -------- - inputs : InputsComponentWiseDivideFc + inputs : InputsComponentWiseDivideFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsComponentWiseDivideFc + outputs : OutputsComponentWiseDivideFc """ return super().outputs -#internal name: component_wise_divide_fc -#scripting name: component_wise_divide_fc class InputsComponentWiseDivideFc(_Inputs): - """Intermediate class used to connect user inputs to component_wise_divide_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.component_wise_divide_fc() - >>> my_fields_containerA = dpf.FieldsContainer() - >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> my_fields_containerB = dpf.FieldsContainer() - >>> op.inputs.fields_containerB.connect(my_fields_containerB) + """Intermediate class used to connect user inputs to + component_wise_divide_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.component_wise_divide_fc() + >>> my_fields_containerA = dpf.FieldsContainer() + >>> op.inputs.fields_containerA.connect(my_fields_containerA) + >>> my_fields_containerB = dpf.FieldsContainer() + >>> op.inputs.fields_containerB.connect(my_fields_containerB) """ + def __init__(self, op: Operator): super().__init__(component_wise_divide_fc._spec().inputs, op) - self._fields_containerA = Input(component_wise_divide_fc._spec().input_pin(0), 0, op, -1) + self._fields_containerA = Input( + component_wise_divide_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_containerA) - self._fields_containerB = Input(component_wise_divide_fc._spec().input_pin(1), 1, op, -1) + self._fields_containerB = Input( + component_wise_divide_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fields_containerB) @property def fields_containerA(self): - """Allows to connect fields_containerA input to the operator + """Allows to connect fields_containerA input to the operator. Parameters ---------- - my_fields_containerA : FieldsContainer, + my_fields_containerA : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.component_wise_divide_fc() >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> #or + >>> # or >>> op.inputs.fields_containerA(my_fields_containerA) - """ return self._fields_containerA @property def fields_containerB(self): - """Allows to connect fields_containerB input to the operator + """Allows to connect fields_containerB input to the operator. 
Parameters ---------- - my_fields_containerB : FieldsContainer, + my_fields_containerB : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.component_wise_divide_fc() >>> op.inputs.fields_containerB.connect(my_fields_containerB) - >>> #or + >>> # or >>> op.inputs.fields_containerB(my_fields_containerB) - """ return self._fields_containerB + class OutputsComponentWiseDivideFc(_Outputs): - """Intermediate class used to get outputs from component_wise_divide_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.component_wise_divide_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + component_wise_divide_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.component_wise_divide_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(component_wise_divide_fc._spec().outputs, op) - self._fields_container = Output(component_wise_divide_fc._spec().output_pin(0), 0, op) + self._fields_container = Output( + component_wise_divide_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.component_wise_divide_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/conjugate.py b/ansys/dpf/core/operators/math/conjugate.py index ddeae28fea9..74f22349e0f 100644 --- a/ansys/dpf/core/operators/math/conjugate.py +++ b/ansys/dpf/core/operators/math/conjugate.py @@ -1,60 +1,91 @@ """ conjugate -========= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class conjugate(Operator): - """Computes element-wise conjugate of field containers containing complex fields. + """Computes element-wise conjugate of field containers containing complex + fields. + + Parameters + ---------- + fields_container : FieldsContainer - available inputs: - - fields_container (FieldsContainer) - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.conjugate() - >>> # Instantiate operator - >>> op = dpf.operators.math.conjugate() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.conjugate( + ... 
fields_container=my_fields_container, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.conjugate(fields_container=my_fields_container) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="conjugate", config = config, server = server) + super().__init__(name="conjugate", config=config, server=server) self._inputs = InputsConjugate(self) self._outputs = OutputsConjugate(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Computes element-wise conjugate of field containers containing complex fields.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Computes element-wise conjugate of field containers containing complex + fields.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "conjugate") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="conjugate", server=server) @property def inputs(self): @@ -62,91 +93,87 @@ def inputs(self): Returns -------- - inputs : InputsConjugate + inputs : InputsConjugate """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsConjugate + outputs : OutputsConjugate """ return super().outputs -#internal name: conjugate -#scripting name: conjugate class InputsConjugate(_Inputs): - """Intermediate class used to connect user inputs to conjugate operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.conjugate() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + conjugate operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.conjugate() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(conjugate._spec().inputs, op) - self._fields_container = Input(conjugate._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(conjugate._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.conjugate() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsConjugate(_Outputs): - """Intermediate class used to get outputs from conjugate operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.conjugate() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + conjugate operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.conjugate() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(conjugate._spec().outputs, op) - self._fields_container = Output(conjugate._spec().output_pin(0), 0, op) + self._fields_container = Output(conjugate._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.conjugate() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/correlation.py b/ansys/dpf/core/operators/math/correlation.py new file mode 100644 index 00000000000..5254efa3649 --- /dev/null +++ b/ansys/dpf/core/operators/math/correlation.py @@ -0,0 +1,276 @@ +""" +correlation +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class correlation(Operator): + """take two fields and a weighting and compute their correlation: + aMb/(||aMa||.||bMb||) + + Parameters + ---------- + fieldA : Field or float + fieldB : Field or FieldsContainer + ponderation : Field + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.correlation() + + >>> # Make input connections + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) + >>> my_ponderation = dpf.Field() + >>> op.inputs.ponderation.connect(my_ponderation) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.correlation( + ... fieldA=my_fieldA, + ... fieldB=my_fieldB, + ... ponderation=my_ponderation, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + >>> result_index = op.outputs.index() + """ + + def __init__( + self, fieldA=None, fieldB=None, ponderation=None, config=None, server=None + ): + super().__init__(name="correlation", config=config, server=server) + self._inputs = InputsCorrelation(self) + self._outputs = OutputsCorrelation(self) + if fieldA is not None: + self.inputs.fieldA.connect(fieldA) + if fieldB is not None: + self.inputs.fieldB.connect(fieldB) + if ponderation is not None: + self.inputs.ponderation.connect(ponderation) + + @staticmethod + def _spec(): + description = """take two fields and a weighting and compute their correlation: + aMb/(||aMa||.||bMb||)""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fieldA", + type_names=["field", "double", "vector"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="fieldB", + type_names=["field", "fields_container"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="ponderation", + type_names=["field"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="index", + type_names=["int32"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="correlation", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsCorrelation + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsCorrelation + """ + return super().outputs + + +class InputsCorrelation(_Inputs): + """Intermediate class used to connect user inputs to + correlation operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.correlation() + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) + >>> my_ponderation = dpf.Field() + >>> op.inputs.ponderation.connect(my_ponderation) + """ + + def __init__(self, op: Operator): + super().__init__(correlation._spec().inputs, op) + self._fieldA = Input(correlation._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._fieldA) + self._fieldB = Input(correlation._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._fieldB) + self._ponderation = Input(correlation._spec().input_pin(2), 2, op, -1) + self._inputs.append(self._ponderation) + + @property + def fieldA(self): + """Allows to connect fieldA input to the operator. + + Parameters + ---------- + my_fieldA : Field or float + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.correlation() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> # or + >>> op.inputs.fieldA(my_fieldA) + """ + return self._fieldA + + @property + def fieldB(self): + """Allows to connect fieldB input to the operator. 
+ + Parameters + ---------- + my_fieldB : Field or FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.correlation() + >>> op.inputs.fieldB.connect(my_fieldB) + >>> # or + >>> op.inputs.fieldB(my_fieldB) + """ + return self._fieldB + + @property + def ponderation(self): + """Allows to connect ponderation input to the operator. + + Parameters + ---------- + my_ponderation : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.correlation() + >>> op.inputs.ponderation.connect(my_ponderation) + >>> # or + >>> op.inputs.ponderation(my_ponderation) + """ + return self._ponderation + + +class OutputsCorrelation(_Outputs): + """Intermediate class used to get outputs from + correlation operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.correlation() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() + >>> result_index = op.outputs.index() + """ + + def __init__(self, op: Operator): + super().__init__(correlation._spec().outputs, op) + self._field = Output(correlation._spec().output_pin(0), 0, op) + self._outputs.append(self._field) + self._index = Output(correlation._spec().output_pin(1), 1, op) + self._outputs.append(self._index) + + @property + def field(self): + """Allows to get field output of the operator + + Returns + ---------- + my_field : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.correlation() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() + """ # noqa: E501 + return self._field + + @property + def index(self): + """Allows to get index output of the operator + + Returns + ---------- + my_index : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.correlation() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_index = op.outputs.index() + """ # noqa: E501 + return self._index diff --git a/ansys/dpf/core/operators/math/cos.py b/ansys/dpf/core/operators/math/cos.py index aa025e7f8ef..6044503d6ae 100644 --- a/ansys/dpf/core/operators/math/cos.py +++ b/ansys/dpf/core/operators/math/cos.py @@ -1,60 +1,92 @@ """ cos -=== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class cos(Operator): """Computes element-wise cos(field[i]). - available inputs: - - field (Field, FieldsContainer) + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + - available outputs: - - field (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.cos() - >>> # Instantiate operator - >>> op = dpf.operators.math.cos() + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.cos( + ... field=my_field, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.cos(field=my_field) + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, field=None, config=None, server=None): - super().__init__(name="cos", config = config, server = server) + super().__init__(name="cos", config=config, server=server) self._inputs = InputsCos(self) self._outputs = OutputsCos(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) @staticmethod def _spec(): - spec = Specification(description="""Computes element-wise cos(field[i]).""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Computes element-wise cos(field[i]).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "cos") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. 
When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="cos", server=server) @property def inputs(self): @@ -62,93 +94,90 @@ def inputs(self): Returns -------- - inputs : InputsCos + inputs : InputsCos """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCos + outputs : OutputsCos """ return super().outputs -#internal name: cos -#scripting name: cos class InputsCos(_Inputs): - """Intermediate class used to connect user inputs to cos operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.cos() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + """Intermediate class used to connect user inputs to + cos operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.cos() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) """ + def __init__(self, op: Operator): super().__init__(cos._spec().inputs, op) - self._field = Input(cos._spec().input_pin(0), 0, op, -1) + self._field = Input(cos._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.cos() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field + class OutputsCos(_Outputs): - """Intermediate class used to get outputs from cos operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.cos() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + cos operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.cos() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(cos._spec().outputs, op) - self._field = Output(cos._spec().output_pin(0), 0, op) + self._field = Output(cos._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.cos() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/cos_fc.py b/ansys/dpf/core/operators/math/cos_fc.py index 877e5d8eb4f..4e9f8c733a8 100644 --- a/ansys/dpf/core/operators/math/cos_fc.py +++ b/ansys/dpf/core/operators/math/cos_fc.py @@ -1,60 +1,92 @@ """ cos_fc -====== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class cos_fc(Operator): """Computes element-wise cos(field[i]). - available inputs: - - fields_container (FieldsContainer) + Parameters + ---------- + fields_container : FieldsContainer + Field or fields container with only one field + is expected + - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.cos_fc() - >>> # Instantiate operator - >>> op = dpf.operators.math.cos_fc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.cos_fc( + ... fields_container=my_fields_container, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.cos_fc(fields_container=my_fields_container) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="cos_fc", config = config, server = server) + super().__init__(name="cos_fc", config=config, server=server) self._inputs = InputsCosFc(self) self._outputs = OutputsCosFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Computes element-wise cos(field[i]).""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Computes element-wise cos(field[i]).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "cos_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="cos_fc", server=server) @property def inputs(self): @@ -62,93 +94,90 @@ def inputs(self): Returns -------- - inputs : InputsCosFc + inputs : InputsCosFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCosFc + outputs : OutputsCosFc """ return super().outputs -#internal name: cos_fc -#scripting name: cos_fc class InputsCosFc(_Inputs): - """Intermediate class used to connect user inputs to cos_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.cos_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + cos_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.cos_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(cos_fc._spec().inputs, op) - self._fields_container = Input(cos_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(cos_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.cos_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsCosFc(_Outputs): - """Intermediate class used to get outputs from cos_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.cos_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + cos_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.cos_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(cos_fc._spec().outputs, op) - self._fields_container = Output(cos_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(cos_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.cos_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/cplx_add.py b/ansys/dpf/core/operators/math/cplx_add.py deleted file mode 100644 index 219c6b05b39..00000000000 --- a/ansys/dpf/core/operators/math/cplx_add.py +++ /dev/null @@ -1,182 +0,0 @@ -""" -cplx_add -======== -""" -from ansys.dpf.core.dpf_operator import Operator -from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type -from ansys.dpf.core.operators.specification import PinSpecification, Specification - -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" - -class cplx_add(Operator): - """Computes addition between two field containers containing complex fields. - - available inputs: - - fields_containerA (FieldsContainer) - - fields_containerB (FieldsContainer) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.cplx_add() - - >>> # Make input connections - >>> my_fields_containerA = dpf.FieldsContainer() - >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> my_fields_containerB = dpf.FieldsContainer() - >>> op.inputs.fields_containerB.connect(my_fields_containerB) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.cplx_add(fields_containerA=my_fields_containerA,fields_containerB=my_fields_containerB) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_containerA=None, fields_containerB=None, config=None, server=None): - super().__init__(name="cplx_add", config = config, server = server) - self._inputs = InputsCplxAdd(self) - self._outputs = OutputsCplxAdd(self) - if fields_containerA 
!=None: - self.inputs.fields_containerA.connect(fields_containerA) - if fields_containerB !=None: - self.inputs.fields_containerB.connect(fields_containerB) - - @staticmethod - def _spec(): - spec = Specification(description="""Computes addition between two field containers containing complex fields.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_containerA", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "fields_containerB", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) - return spec - - - @staticmethod - def default_config(): - return Operator.default_config(name = "cplx_add") - - @property - def inputs(self): - """Enables to connect inputs to the operator - - Returns - -------- - inputs : InputsCplxAdd - """ - return super().inputs - - - @property - def outputs(self): - """Enables to get outputs of the operator by evaluationg it - - Returns - -------- - outputs : OutputsCplxAdd - """ - return super().outputs - - -#internal name: cplx_add -#scripting name: cplx_add -class InputsCplxAdd(_Inputs): - """Intermediate class used to connect user inputs to cplx_add operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.cplx_add() - >>> my_fields_containerA = dpf.FieldsContainer() - >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> my_fields_containerB = dpf.FieldsContainer() - >>> op.inputs.fields_containerB.connect(my_fields_containerB) - """ - def __init__(self, op: Operator): - super().__init__(cplx_add._spec().inputs, op) - self._fields_containerA = Input(cplx_add._spec().input_pin(0), 0, op, -1) - self._inputs.append(self._fields_containerA) - self._fields_containerB = Input(cplx_add._spec().input_pin(1), 1, op, -1) - self._inputs.append(self._fields_containerB) - - 
@property - def fields_containerA(self): - """Allows to connect fields_containerA input to the operator - - Parameters - ---------- - my_fields_containerA : FieldsContainer, - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.cplx_add() - >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> #or - >>> op.inputs.fields_containerA(my_fields_containerA) - - """ - return self._fields_containerA - - @property - def fields_containerB(self): - """Allows to connect fields_containerB input to the operator - - Parameters - ---------- - my_fields_containerB : FieldsContainer, - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.cplx_add() - >>> op.inputs.fields_containerB.connect(my_fields_containerB) - >>> #or - >>> op.inputs.fields_containerB(my_fields_containerB) - - """ - return self._fields_containerB - -class OutputsCplxAdd(_Outputs): - """Intermediate class used to get outputs from cplx_add operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.cplx_add() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ - def __init__(self, op: Operator): - super().__init__(cplx_add._spec().outputs, op) - self._fields_container = Output(cplx_add._spec().output_pin(0), 0, op) - self._outputs.append(self._fields_container) - - @property - def fields_container(self): - """Allows to get fields_container output of the operator - - - Returns - ---------- - my_fields_container : FieldsContainer, - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.cplx_add() - >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ - return self._fields_container - diff --git a/ansys/dpf/core/operators/math/cplx_derive.py b/ansys/dpf/core/operators/math/cplx_derive.py index bc3e0db3363..172f69b6f34 100644 --- a/ansys/dpf/core/operators/math/cplx_derive.py +++ b/ansys/dpf/core/operators/math/cplx_derive.py @@ -1,60 +1,89 @@ """ cplx_derive -=========== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class cplx_derive(Operator): """Derive field containers containing complex fields. - available inputs: - - fields_container (FieldsContainer) + Parameters + ---------- + fields_container : FieldsContainer + - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.cplx_derive() - >>> # Instantiate operator - >>> op = dpf.operators.math.cplx_derive() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.cplx_derive( + ... fields_container=my_fields_container, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.cplx_derive(fields_container=my_fields_container) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="cplx_derive", config = config, server = server) + super().__init__(name="cplx_derive", config=config, server=server) self._inputs = InputsCplxDerive(self) self._outputs = OutputsCplxDerive(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Derive field containers containing complex fields.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Derive field containers containing complex fields.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "cplx_derive") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="cplx_derive", server=server) @property def inputs(self): @@ -62,91 +91,87 @@ def inputs(self): Returns -------- - inputs : InputsCplxDerive + inputs : InputsCplxDerive """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCplxDerive + outputs : OutputsCplxDerive """ return super().outputs -#internal name: cplx_derive -#scripting name: cplx_derive class InputsCplxDerive(_Inputs): - """Intermediate class used to connect user inputs to cplx_derive operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.cplx_derive() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + cplx_derive operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.cplx_derive() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(cplx_derive._spec().inputs, op) - self._fields_container = Input(cplx_derive._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(cplx_derive._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.cplx_derive() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsCplxDerive(_Outputs): - """Intermediate class used to get outputs from cplx_derive operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.cplx_derive() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + cplx_derive operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.cplx_derive() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(cplx_derive._spec().outputs, op) - self._fields_container = Output(cplx_derive._spec().output_pin(0), 0, op) + self._fields_container = Output(cplx_derive._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.cplx_derive() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/cplx_divide.py b/ansys/dpf/core/operators/math/cplx_divide.py index c2b9825eb46..c2a5172ed89 100644 --- a/ansys/dpf/core/operators/math/cplx_divide.py +++ b/ansys/dpf/core/operators/math/cplx_divide.py @@ -1,66 +1,105 @@ """ cplx_divide -=========== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class cplx_divide(Operator): - """Computes division between two field containers containing complex fields. + """Computes division between two field containers containing complex + fields. 
+ + Parameters + ---------- + fields_containerA : FieldsContainer + fields_containerB : FieldsContainer - available inputs: - - fields_containerA (FieldsContainer) - - fields_containerB (FieldsContainer) - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.cplx_divide() - >>> # Instantiate operator - >>> op = dpf.operators.math.cplx_divide() + >>> # Make input connections + >>> my_fields_containerA = dpf.FieldsContainer() + >>> op.inputs.fields_containerA.connect(my_fields_containerA) + >>> my_fields_containerB = dpf.FieldsContainer() + >>> op.inputs.fields_containerB.connect(my_fields_containerB) - >>> # Make input connections - >>> my_fields_containerA = dpf.FieldsContainer() - >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> my_fields_containerB = dpf.FieldsContainer() - >>> op.inputs.fields_containerB.connect(my_fields_containerB) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.cplx_divide( + ... fields_containerA=my_fields_containerA, + ... fields_containerB=my_fields_containerB, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.cplx_divide(fields_containerA=my_fields_containerA,fields_containerB=my_fields_containerB) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_containerA=None, fields_containerB=None, config=None, server=None): - super().__init__(name="cplx_divide", config = config, server = server) + def __init__( + self, fields_containerA=None, fields_containerB=None, config=None, server=None + ): + super().__init__(name="cplx_divide", config=config, server=server) self._inputs = InputsCplxDivide(self) self._outputs = OutputsCplxDivide(self) - if fields_containerA !=None: + if fields_containerA is not None: self.inputs.fields_containerA.connect(fields_containerA) - if fields_containerB !=None: + if fields_containerB is not None: self.inputs.fields_containerB.connect(fields_containerB) @staticmethod def _spec(): - spec = Specification(description="""Computes division between two field containers containing complex fields.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_containerA", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "fields_containerB", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Computes division between two field containers containing complex + fields.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_containerA", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="fields_containerB", + type_names=["fields_container"], + optional=False, + document="""""", + 
), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "cplx_divide") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="cplx_divide", server=server) @property def inputs(self): @@ -68,115 +107,109 @@ def inputs(self): Returns -------- - inputs : InputsCplxDivide + inputs : InputsCplxDivide """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCplxDivide + outputs : OutputsCplxDivide """ return super().outputs -#internal name: cplx_divide -#scripting name: cplx_divide class InputsCplxDivide(_Inputs): - """Intermediate class used to connect user inputs to cplx_divide operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.cplx_divide() - >>> my_fields_containerA = dpf.FieldsContainer() - >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> my_fields_containerB = dpf.FieldsContainer() - >>> op.inputs.fields_containerB.connect(my_fields_containerB) + """Intermediate class used to connect user inputs to + cplx_divide operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.cplx_divide() + >>> my_fields_containerA = dpf.FieldsContainer() + >>> op.inputs.fields_containerA.connect(my_fields_containerA) + >>> my_fields_containerB = dpf.FieldsContainer() + >>> op.inputs.fields_containerB.connect(my_fields_containerB) """ + def __init__(self, op: Operator): super().__init__(cplx_divide._spec().inputs, op) - self._fields_containerA = Input(cplx_divide._spec().input_pin(0), 0, op, -1) + self._fields_containerA = Input(cplx_divide._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_containerA) - self._fields_containerB = Input(cplx_divide._spec().input_pin(1), 1, op, -1) + self._fields_containerB = Input(cplx_divide._spec().input_pin(1), 1, op, -1) self._inputs.append(self._fields_containerB) @property def fields_containerA(self): - """Allows to connect fields_containerA input to the operator + """Allows to connect fields_containerA input to the operator. Parameters ---------- - my_fields_containerA : FieldsContainer, + my_fields_containerA : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.cplx_divide() >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> #or + >>> # or >>> op.inputs.fields_containerA(my_fields_containerA) - """ return self._fields_containerA @property def fields_containerB(self): - """Allows to connect fields_containerB input to the operator + """Allows to connect fields_containerB input to the operator. 
Parameters ---------- - my_fields_containerB : FieldsContainer, + my_fields_containerB : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.cplx_divide() >>> op.inputs.fields_containerB.connect(my_fields_containerB) - >>> #or + >>> # or >>> op.inputs.fields_containerB(my_fields_containerB) - """ return self._fields_containerB + class OutputsCplxDivide(_Outputs): - """Intermediate class used to get outputs from cplx_divide operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.cplx_divide() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + cplx_divide operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.cplx_divide() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(cplx_divide._spec().outputs, op) - self._fields_container = Output(cplx_divide._spec().output_pin(0), 0, op) + self._fields_container = Output(cplx_divide._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.cplx_divide() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/cplx_dot.py b/ansys/dpf/core/operators/math/cplx_dot.py index 2ffaef4be8f..9e820631617 100644 --- a/ansys/dpf/core/operators/math/cplx_dot.py +++ b/ansys/dpf/core/operators/math/cplx_dot.py @@ -1,66 +1,105 @@ """ cplx_dot -======== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class cplx_dot(Operator): - """Computes product between two field containers containing complex fields. + """Computes product between two field containers containing complex + fields. 
+ + Parameters + ---------- + fields_containerA : FieldsContainer + fields_containerB : FieldsContainer - available inputs: - - fields_containerA (FieldsContainer) - - fields_containerB (FieldsContainer) - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.cplx_dot() - >>> # Instantiate operator - >>> op = dpf.operators.math.cplx_dot() + >>> # Make input connections + >>> my_fields_containerA = dpf.FieldsContainer() + >>> op.inputs.fields_containerA.connect(my_fields_containerA) + >>> my_fields_containerB = dpf.FieldsContainer() + >>> op.inputs.fields_containerB.connect(my_fields_containerB) - >>> # Make input connections - >>> my_fields_containerA = dpf.FieldsContainer() - >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> my_fields_containerB = dpf.FieldsContainer() - >>> op.inputs.fields_containerB.connect(my_fields_containerB) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.cplx_dot( + ... fields_containerA=my_fields_containerA, + ... fields_containerB=my_fields_containerB, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.cplx_dot(fields_containerA=my_fields_containerA,fields_containerB=my_fields_containerB) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_containerA=None, fields_containerB=None, config=None, server=None): - super().__init__(name="cplx_dot", config = config, server = server) + def __init__( + self, fields_containerA=None, fields_containerB=None, config=None, server=None + ): + super().__init__(name="cplx_dot", config=config, server=server) self._inputs = InputsCplxDot(self) self._outputs = OutputsCplxDot(self) - if fields_containerA !=None: + if fields_containerA is not None: self.inputs.fields_containerA.connect(fields_containerA) - if fields_containerB !=None: + if fields_containerB is not None: self.inputs.fields_containerB.connect(fields_containerB) @staticmethod def _spec(): - spec = Specification(description="""Computes product between two field containers containing complex fields.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_containerA", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "fields_containerB", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Computes product between two field containers containing complex + fields.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_containerA", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="fields_containerB", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + 
map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "cplx_dot") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="cplx_dot", server=server) @property def inputs(self): @@ -68,115 +107,109 @@ def inputs(self): Returns -------- - inputs : InputsCplxDot + inputs : InputsCplxDot """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCplxDot + outputs : OutputsCplxDot """ return super().outputs -#internal name: cplx_dot -#scripting name: cplx_dot class InputsCplxDot(_Inputs): - """Intermediate class used to connect user inputs to cplx_dot operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.cplx_dot() - >>> my_fields_containerA = dpf.FieldsContainer() - >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> my_fields_containerB = dpf.FieldsContainer() - >>> op.inputs.fields_containerB.connect(my_fields_containerB) + """Intermediate class used to connect user inputs to + cplx_dot operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.cplx_dot() + >>> my_fields_containerA = dpf.FieldsContainer() + >>> op.inputs.fields_containerA.connect(my_fields_containerA) + >>> my_fields_containerB = dpf.FieldsContainer() + >>> op.inputs.fields_containerB.connect(my_fields_containerB) """ + def __init__(self, op: Operator): super().__init__(cplx_dot._spec().inputs, op) - self._fields_containerA = Input(cplx_dot._spec().input_pin(0), 0, op, -1) + self._fields_containerA = Input(cplx_dot._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_containerA) - self._fields_containerB = Input(cplx_dot._spec().input_pin(1), 1, op, -1) + self._fields_containerB = Input(cplx_dot._spec().input_pin(1), 1, op, -1) self._inputs.append(self._fields_containerB) @property def fields_containerA(self): - """Allows to connect fields_containerA input to the operator + """Allows to connect fields_containerA input to the operator. Parameters ---------- - my_fields_containerA : FieldsContainer, + my_fields_containerA : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.cplx_dot() >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> #or + >>> # or >>> op.inputs.fields_containerA(my_fields_containerA) - """ return self._fields_containerA @property def fields_containerB(self): - """Allows to connect fields_containerB input to the operator + """Allows to connect fields_containerB input to the operator. 
Parameters ---------- - my_fields_containerB : FieldsContainer, + my_fields_containerB : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.cplx_dot() >>> op.inputs.fields_containerB.connect(my_fields_containerB) - >>> #or + >>> # or >>> op.inputs.fields_containerB(my_fields_containerB) - """ return self._fields_containerB + class OutputsCplxDot(_Outputs): - """Intermediate class used to get outputs from cplx_dot operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.cplx_dot() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + cplx_dot operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.cplx_dot() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(cplx_dot._spec().outputs, op) - self._fields_container = Output(cplx_dot._spec().output_pin(0), 0, op) + self._fields_container = Output(cplx_dot._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.cplx_dot() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/cplx_multiply.py b/ansys/dpf/core/operators/math/cplx_multiply.py index 7defe65dd27..b06ac9d8682 100644 --- a/ansys/dpf/core/operators/math/cplx_multiply.py +++ b/ansys/dpf/core/operators/math/cplx_multiply.py @@ -1,66 +1,105 @@ """ cplx_multiply -============= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class cplx_multiply(Operator): - """Computes multiply between two field containers containing complex fields. + """Computes multiply between two field containers containing complex + fields. 
+ + Parameters + ---------- + fields_containerA : FieldsContainer + fields_containerB : FieldsContainer - available inputs: - - fields_containerA (FieldsContainer) - - fields_containerB (FieldsContainer) - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.cplx_multiply() - >>> # Instantiate operator - >>> op = dpf.operators.math.cplx_multiply() + >>> # Make input connections + >>> my_fields_containerA = dpf.FieldsContainer() + >>> op.inputs.fields_containerA.connect(my_fields_containerA) + >>> my_fields_containerB = dpf.FieldsContainer() + >>> op.inputs.fields_containerB.connect(my_fields_containerB) - >>> # Make input connections - >>> my_fields_containerA = dpf.FieldsContainer() - >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> my_fields_containerB = dpf.FieldsContainer() - >>> op.inputs.fields_containerB.connect(my_fields_containerB) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.cplx_multiply( + ... fields_containerA=my_fields_containerA, + ... fields_containerB=my_fields_containerB, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.cplx_multiply(fields_containerA=my_fields_containerA,fields_containerB=my_fields_containerB) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_containerA=None, fields_containerB=None, config=None, server=None): - super().__init__(name="cplx_multiply", config = config, server = server) + def __init__( + self, fields_containerA=None, fields_containerB=None, config=None, server=None + ): + super().__init__(name="cplx_multiply", config=config, server=server) self._inputs = InputsCplxMultiply(self) self._outputs = OutputsCplxMultiply(self) - if fields_containerA !=None: + if fields_containerA is not None: self.inputs.fields_containerA.connect(fields_containerA) - if fields_containerB !=None: + if fields_containerB is not None: self.inputs.fields_containerB.connect(fields_containerB) @staticmethod def _spec(): - spec = Specification(description="""Computes multiply between two field containers containing complex fields.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_containerA", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "fields_containerB", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Computes multiply between two field containers containing complex + fields.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_containerA", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="fields_containerB", + type_names=["fields_container"], + optional=False, + 
document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "cplx_multiply") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="cplx_multiply", server=server) @property def inputs(self): @@ -68,115 +107,109 @@ def inputs(self): Returns -------- - inputs : InputsCplxMultiply + inputs : InputsCplxMultiply """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCplxMultiply + outputs : OutputsCplxMultiply """ return super().outputs -#internal name: cplx_multiply -#scripting name: cplx_multiply class InputsCplxMultiply(_Inputs): - """Intermediate class used to connect user inputs to cplx_multiply operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.cplx_multiply() - >>> my_fields_containerA = dpf.FieldsContainer() - >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> my_fields_containerB = dpf.FieldsContainer() - >>> op.inputs.fields_containerB.connect(my_fields_containerB) + """Intermediate class used to connect user inputs to + cplx_multiply operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.cplx_multiply() + >>> my_fields_containerA = dpf.FieldsContainer() + >>> op.inputs.fields_containerA.connect(my_fields_containerA) + >>> my_fields_containerB = dpf.FieldsContainer() + >>> op.inputs.fields_containerB.connect(my_fields_containerB) """ + def __init__(self, op: Operator): super().__init__(cplx_multiply._spec().inputs, op) - self._fields_containerA = Input(cplx_multiply._spec().input_pin(0), 0, op, -1) + self._fields_containerA = Input(cplx_multiply._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_containerA) - self._fields_containerB = Input(cplx_multiply._spec().input_pin(1), 1, op, -1) + self._fields_containerB = Input(cplx_multiply._spec().input_pin(1), 1, op, -1) self._inputs.append(self._fields_containerB) @property def fields_containerA(self): - """Allows to connect fields_containerA input to the operator + """Allows to connect fields_containerA input to the operator. Parameters ---------- - my_fields_containerA : FieldsContainer, + my_fields_containerA : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.cplx_multiply() >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> #or + >>> # or >>> op.inputs.fields_containerA(my_fields_containerA) - """ return self._fields_containerA @property def fields_containerB(self): - """Allows to connect fields_containerB input to the operator + """Allows to connect fields_containerB input to the operator. 
Parameters ---------- - my_fields_containerB : FieldsContainer, + my_fields_containerB : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.cplx_multiply() >>> op.inputs.fields_containerB.connect(my_fields_containerB) - >>> #or + >>> # or >>> op.inputs.fields_containerB(my_fields_containerB) - """ return self._fields_containerB + class OutputsCplxMultiply(_Outputs): - """Intermediate class used to get outputs from cplx_multiply operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.cplx_multiply() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + cplx_multiply operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.cplx_multiply() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(cplx_multiply._spec().outputs, op) - self._fields_container = Output(cplx_multiply._spec().output_pin(0), 0, op) + self._fields_container = Output(cplx_multiply._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.cplx_multiply() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/dot.py b/ansys/dpf/core/operators/math/dot.py index 87fbbe4c455..df84297c011 100644 --- a/ansys/dpf/core/operators/math/dot.py +++ b/ansys/dpf/core/operators/math/dot.py @@ -1,66 +1,116 @@ """ dot -=== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class dot(Operator): - """Computes element-wise dot product between two vector fields. If one field's scoping has 'overall' location, then this field's values are applied on the entire other field. - - available inputs: - - fieldA (Field, FieldsContainer) - - fieldB (Field, FieldsContainer) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.dot() - - >>> # Make input connections - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.dot(fieldA=my_fieldA,fieldB=my_fieldB) + """Computes element-wise dot product between two vector fields. 
If one + field's scoping has 'overall' location, then this field's values + are applied on the entire other field.When using a constant or + 'work_by_index', it's possible to use 'inplace' to reuse one of + the fields, but only in the case where both fields are scalar. + + Parameters + ---------- + fieldA : Field or FieldsContainer + Field or fields container with only one field + is expected + fieldB : Field or FieldsContainer + Field or fields container with only one field + is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.dot() + + >>> # Make input connections + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.dot( + ... fieldA=my_fieldA, + ... fieldB=my_fieldB, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, fieldA=None, fieldB=None, config=None, server=None): - super().__init__(name="dot", config = config, server = server) + super().__init__(name="dot", config=config, server=server) self._inputs = InputsDot(self) self._outputs = OutputsDot(self) - if fieldA !=None: + if fieldA is not None: self.inputs.fieldA.connect(fieldA) - if fieldB !=None: + if fieldB is not None: self.inputs.fieldB.connect(fieldB) @staticmethod def _spec(): - spec = Specification(description="""Computes element-wise dot product between two vector fields. 
If one field's scoping has 'overall' location, then this field's values are applied on the entire other field.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fieldA", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "fieldB", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Computes element-wise dot product between two vector fields. If one + field's scoping has 'overall' location, then this field's + values are applied on the entire other field.When using a + constant or 'work_by_index', it's possible to use + 'inplace' to reuse one of the fields, but only in the case + where both fields are scalar.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fieldA", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="fieldB", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "dot") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="dot", server=server) @property def inputs(self): @@ -68,119 +118,115 @@ def inputs(self): Returns -------- - inputs : InputsDot + inputs : InputsDot """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsDot + outputs : OutputsDot """ return super().outputs -#internal name: dot -#scripting name: dot class InputsDot(_Inputs): - """Intermediate class used to connect user inputs to dot operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.dot() - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) + """Intermediate class used to connect user inputs to + dot operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.dot() + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) """ + def __init__(self, op: Operator): super().__init__(dot._spec().inputs, op) - self._fieldA = Input(dot._spec().input_pin(0), 0, op, -1) + self._fieldA = Input(dot._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fieldA) - self._fieldB = Input(dot._spec().input_pin(1), 1, op, -1) + self._fieldB = Input(dot._spec().input_pin(1), 1, op, -1) self._inputs.append(self._fieldB) @property def fieldA(self): - """Allows to connect fieldA input to the operator + """Allows to connect fieldA input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldA : Field, FieldsContainer, + my_fieldA : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.dot() >>> op.inputs.fieldA.connect(my_fieldA) - >>> #or + >>> # or >>> op.inputs.fieldA(my_fieldA) - """ return self._fieldA @property def fieldB(self): - """Allows to connect fieldB input to the operator + """Allows to connect fieldB input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldB : Field, FieldsContainer, + my_fieldB : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.dot() >>> op.inputs.fieldB.connect(my_fieldB) - >>> #or + >>> # or >>> op.inputs.fieldB(my_fieldB) - """ return self._fieldB + class OutputsDot(_Outputs): - """Intermediate class used to get outputs from dot operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.dot() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + dot operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.dot() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(dot._spec().outputs, op) - self._field = Output(dot._spec().output_pin(0), 0, op) + self._field = Output(dot._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.dot() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/dot_tensor.py b/ansys/dpf/core/operators/math/dot_tensor.py index fd63cc95ffc..9476257c81c 100644 --- a/ansys/dpf/core/operators/math/dot_tensor.py +++ b/ansys/dpf/core/operators/math/dot_tensor.py @@ -1,66 +1,107 @@ """ dot_tensor -========== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class dot_tensor(Operator): """Computes element-wise dot product between two tensor fields. 
- available inputs: - - fieldA (Field, FieldsContainer) - - fieldB (Field, FieldsContainer) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.dot_tensor() - - >>> # Make input connections - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.dot_tensor(fieldA=my_fieldA,fieldB=my_fieldB) + Parameters + ---------- + fieldA : Field or FieldsContainer + Field or fields container with only one field + is expected + fieldB : Field or FieldsContainer + Field or fields container with only one field + is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.dot_tensor() + + >>> # Make input connections + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.dot_tensor( + ... fieldA=my_fieldA, + ... fieldB=my_fieldB, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, fieldA=None, fieldB=None, config=None, server=None): - super().__init__(name="dot_tensor", config = config, server = server) + super().__init__(name="dot_tensor", config=config, server=server) self._inputs = InputsDotTensor(self) self._outputs = OutputsDotTensor(self) - if fieldA !=None: + if fieldA is not None: self.inputs.fieldA.connect(fieldA) - if fieldB !=None: + if fieldB is not None: self.inputs.fieldB.connect(fieldB) @staticmethod def _spec(): - spec = Specification(description="""Computes element-wise dot product between two tensor fields.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fieldA", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "fieldB", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Computes element-wise dot product between two tensor fields.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fieldA", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="fieldB", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "dot_tensor") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="dot_tensor", server=server) @property def inputs(self): @@ -68,119 +109,115 @@ def inputs(self): Returns -------- - inputs : InputsDotTensor + inputs : InputsDotTensor """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsDotTensor + outputs : OutputsDotTensor """ return super().outputs -#internal name: dot_tensor -#scripting name: dot_tensor class InputsDotTensor(_Inputs): - """Intermediate class used to connect user inputs to dot_tensor operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.dot_tensor() - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) + """Intermediate class used to connect user inputs to + dot_tensor operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.dot_tensor() + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) """ + def __init__(self, op: Operator): super().__init__(dot_tensor._spec().inputs, op) - self._fieldA = Input(dot_tensor._spec().input_pin(0), 0, op, -1) + self._fieldA = Input(dot_tensor._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fieldA) - self._fieldB = Input(dot_tensor._spec().input_pin(1), 1, op, -1) + self._fieldB = Input(dot_tensor._spec().input_pin(1), 1, op, -1) self._inputs.append(self._fieldB) @property def fieldA(self): - """Allows to connect fieldA input to the operator + """Allows to connect fieldA input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldA : Field, FieldsContainer, + my_fieldA : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.dot_tensor() >>> op.inputs.fieldA.connect(my_fieldA) - >>> #or + >>> # or >>> op.inputs.fieldA(my_fieldA) - """ return self._fieldA @property def fieldB(self): - """Allows to connect fieldB input to the operator + """Allows to connect fieldB input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldB : Field, FieldsContainer, + my_fieldB : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.dot_tensor() >>> op.inputs.fieldB.connect(my_fieldB) - >>> #or + >>> # or >>> op.inputs.fieldB(my_fieldB) - """ return self._fieldB + class OutputsDotTensor(_Outputs): - """Intermediate class used to get outputs from dot_tensor operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.dot_tensor() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + dot_tensor operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.dot_tensor() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(dot_tensor._spec().outputs, op) - self._field = Output(dot_tensor._spec().output_pin(0), 0, op) + self._field = Output(dot_tensor._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.dot_tensor() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/entity_extractor.py b/ansys/dpf/core/operators/math/entity_extractor.py new file mode 100644 index 00000000000..c8849f5c521 --- /dev/null +++ b/ansys/dpf/core/operators/math/entity_extractor.py @@ -0,0 +1,211 @@ +""" +entity_extractor +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class entity_extractor(Operator): + """Extract an entity from a field, based on its Id. + + Parameters + ---------- + fieldA : Field + scalar_int : int + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.entity_extractor() + + >>> # Make input connections + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_scalar_int = int() + >>> op.inputs.scalar_int.connect(my_scalar_int) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.entity_extractor( + ... fieldA=my_fieldA, + ... scalar_int=my_scalar_int, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__(self, fieldA=None, scalar_int=None, config=None, server=None): + super().__init__(name="entity_extractor", config=config, server=server) + self._inputs = InputsEntityExtractor(self) + self._outputs = OutputsEntityExtractor(self) + if fieldA is not None: + self.inputs.fieldA.connect(fieldA) + if scalar_int is not None: + self.inputs.scalar_int.connect(scalar_int) + + @staticmethod + def _spec(): + description = """Extract an entity from a field, based on its Id.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fieldA", + type_names=["field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="scalar_int", + type_names=["int32"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns 
the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="entity_extractor", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsEntityExtractor + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsEntityExtractor + """ + return super().outputs + + +class InputsEntityExtractor(_Inputs): + """Intermediate class used to connect user inputs to + entity_extractor operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.entity_extractor() + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_scalar_int = int() + >>> op.inputs.scalar_int.connect(my_scalar_int) + """ + + def __init__(self, op: Operator): + super().__init__(entity_extractor._spec().inputs, op) + self._fieldA = Input(entity_extractor._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._fieldA) + self._scalar_int = Input(entity_extractor._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scalar_int) + + @property + def fieldA(self): + """Allows to connect fieldA input to the operator. 
+ + Parameters + ---------- + my_fieldA : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.entity_extractor() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> # or + >>> op.inputs.fieldA(my_fieldA) + """ + return self._fieldA + + @property + def scalar_int(self): + """Allows to connect scalar_int input to the operator. + + Parameters + ---------- + my_scalar_int : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.entity_extractor() + >>> op.inputs.scalar_int.connect(my_scalar_int) + >>> # or + >>> op.inputs.scalar_int(my_scalar_int) + """ + return self._scalar_int + + +class OutputsEntityExtractor(_Outputs): + """Intermediate class used to get outputs from + entity_extractor operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.entity_extractor() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() + """ + + def __init__(self, op: Operator): + super().__init__(entity_extractor._spec().outputs, op) + self._field = Output(entity_extractor._spec().output_pin(0), 0, op) + self._outputs.append(self._field) + + @property + def field(self): + """Allows to get field output of the operator + + Returns + ---------- + my_field : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.entity_extractor() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() + """ # noqa: E501 + return self._field diff --git a/ansys/dpf/core/operators/math/exponential.py b/ansys/dpf/core/operators/math/exponential.py new file mode 100644 index 00000000000..fefc74630f5 --- /dev/null +++ b/ansys/dpf/core/operators/math/exponential.py @@ -0,0 +1,188 @@ +""" +exponential +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class exponential(Operator): + """Computes element-wise exp(field[i]). + + Parameters + ---------- + field : Field or FieldsContainer or float + Field or fields container with only one field + is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.exponential() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.exponential( + ... field=my_field, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__(self, field=None, config=None, server=None): + super().__init__(name="exponential", config=config, server=server) + self._inputs = InputsExponential(self) + self._outputs = OutputsExponential(self) + if field is not None: + self.inputs.field.connect(field) + + @staticmethod + def _spec(): + description = """Computes element-wise exp(field[i]).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=[ + "field", + "fields_container", + "double", + "vector", + ], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="exponential", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsExponential + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsExponential + """ + return super().outputs + + +class InputsExponential(_Inputs): + """Intermediate class used to connect user inputs to + exponential operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.exponential() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + """ + + def __init__(self, op: Operator): + super().__init__(exponential._spec().inputs, op) + self._field = Input(exponential._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._field) + + @property + def field(self): + """Allows to connect field input to the operator. + + Field or fields container with only one field + is expected + + Parameters + ---------- + my_field : Field or FieldsContainer or float + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.exponential() + >>> op.inputs.field.connect(my_field) + >>> # or + >>> op.inputs.field(my_field) + """ + return self._field + + +class OutputsExponential(_Outputs): + """Intermediate class used to get outputs from + exponential operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.exponential() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() + """ + + def __init__(self, op: Operator): + super().__init__(exponential._spec().outputs, op) + self._field = Output(exponential._spec().output_pin(0), 0, op) + self._outputs.append(self._field) + + @property + def field(self): + """Allows to get field output of the operator + + Returns + ---------- + my_field : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.exponential() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() + """ # noqa: E501 + return self._field diff --git a/ansys/dpf/core/operators/math/exponential_fc.py b/ansys/dpf/core/operators/math/exponential_fc.py new file mode 100644 index 00000000000..a14e4ea330c --- /dev/null +++ b/ansys/dpf/core/operators/math/exponential_fc.py @@ -0,0 +1,183 @@ +""" +exponential_fc +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class exponential_fc(Operator): + """Computes element-wise exp(field[i]). + + Parameters + ---------- + fields_container : FieldsContainer + Field or fields container with only one field + is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.exponential_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.exponential_fc( + ... fields_container=my_fields_container, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, fields_container=None, config=None, server=None): + super().__init__(name="exponential_fc", config=config, server=server) + self._inputs = InputsExponentialFc(self) + self._outputs = OutputsExponentialFc(self) + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + + @staticmethod + def _spec(): + description = """Computes element-wise exp(field[i]).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="exponential_fc", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsExponentialFc + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsExponentialFc + """ + return super().outputs + + +class InputsExponentialFc(_Inputs): + """Intermediate class used to connect user inputs to + exponential_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.exponential_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + """ + + def __init__(self, op: Operator): + super().__init__(exponential_fc._spec().inputs, op) + self._fields_container = Input(exponential_fc._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to connect fields_container input to the operator. + + Field or fields container with only one field + is expected + + Parameters + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.exponential_fc() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> # or + >>> op.inputs.fields_container(my_fields_container) + """ + return self._fields_container + + +class OutputsExponentialFc(_Outputs): + """Intermediate class used to get outputs from + exponential_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.exponential_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(exponential_fc._spec().outputs, op) + self._fields_container = Output(exponential_fc._spec().output_pin(0), 0, op) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.exponential_fc() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/math/fft_eval.py b/ansys/dpf/core/operators/math/fft_eval.py index 8ffb00ecc75..0e4a14ca83e 100644 --- a/ansys/dpf/core/operators/math/fft_eval.py +++ b/ansys/dpf/core/operators/math/fft_eval.py @@ -1,69 +1,114 @@ """ fft_eval -======== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Math plugin, from "math" category -""" class fft_eval(Operator): """Evaluate the fast fourier transforms at a given set of fields. - available inputs: - - field_t (Field) - - time_scoping (Scoping) (optional) - - available outputs: - - field (Field) - - offset (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.fft_eval() - - >>> # Make input connections - >>> my_field_t = dpf.Field() - >>> op.inputs.field_t.connect(my_field_t) - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.fft_eval(field_t=my_field_t,time_scoping=my_time_scoping) + Parameters + ---------- + field_t : Field + Field of values to evaluate + time_scoping : Scoping, optional + If specified only the results at these set + ids are used + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.fft_eval() + + >>> # Make input connections + >>> my_field_t = dpf.Field() + >>> op.inputs.field_t.connect(my_field_t) + >>> 
my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.fft_eval( + ... field_t=my_field_t, + ... time_scoping=my_time_scoping, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + >>> result_offset = op.outputs.offset() + """ - >>> # Get output data - >>> result_field = op.outputs.field() - >>> result_offset = op.outputs.offset()""" def __init__(self, field_t=None, time_scoping=None, config=None, server=None): - super().__init__(name="fft_eval", config = config, server = server) + super().__init__(name="fft_eval", config=config, server=server) self._inputs = InputsFftEval(self) self._outputs = OutputsFftEval(self) - if field_t !=None: + if field_t is not None: self.inputs.field_t.connect(field_t) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) @staticmethod def _spec(): - spec = Specification(description="""Evaluate the fast fourier transforms at a given set of fields.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field_t", type_names=["field"], optional=False, document="""field of values to evaluate"""), - 1 : PinSpecification(name = "time_scoping", type_names=["scoping"], optional=True, document="""if specified only the results at these set ids are used""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document=""""""), - 2 : PinSpecification(name = "offset", type_names=["field"], optional=False, document="""""")}) + description = ( + """Evaluate the fast fourier transforms at a given set of fields.""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field_t", + type_names=["field"], + optional=False, + document="""Field of values to evaluate""", + ), + 1: PinSpecification( + name="time_scoping", + type_names=["scoping"], + optional=True, 
+ document="""If specified only the results at these set + ids are used""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="offset", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "fft_eval") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="fft_eval", server=server) @property def inputs(self): @@ -71,141 +116,134 @@ def inputs(self): Returns -------- - inputs : InputsFftEval + inputs : InputsFftEval """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsFftEval + outputs : OutputsFftEval """ return super().outputs -#internal name: fft_eval -#scripting name: fft_eval class InputsFftEval(_Inputs): - """Intermediate class used to connect user inputs to fft_eval operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.fft_eval() - >>> my_field_t = dpf.Field() - >>> op.inputs.field_t.connect(my_field_t) - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) + """Intermediate class used to connect user inputs to + fft_eval operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.fft_eval() + >>> my_field_t = dpf.Field() + >>> op.inputs.field_t.connect(my_field_t) + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) """ + def __init__(self, op: Operator): super().__init__(fft_eval._spec().inputs, op) - self._field_t = Input(fft_eval._spec().input_pin(0), 0, op, -1) + self._field_t = Input(fft_eval._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field_t) - self._time_scoping = Input(fft_eval._spec().input_pin(1), 1, op, -1) + self._time_scoping = Input(fft_eval._spec().input_pin(1), 1, op, -1) self._inputs.append(self._time_scoping) @property def field_t(self): - """Allows to connect field_t input to the operator + """Allows to connect field_t input to the operator. - - pindoc: field of values to evaluate + Field of values to evaluate Parameters ---------- - my_field_t : Field, + my_field_t : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.fft_eval() >>> op.inputs.field_t.connect(my_field_t) - >>> #or + >>> # or >>> op.inputs.field_t(my_field_t) - """ return self._field_t @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: if specified only the results at these set ids are used + If specified only the results at these set + ids are used Parameters ---------- - my_time_scoping : Scoping, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.fft_eval() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping + class OutputsFftEval(_Outputs): - """Intermediate class used to get outputs from fft_eval operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.fft_eval() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - >>> result_offset = op.outputs.offset() + """Intermediate class used to get outputs from + fft_eval operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.fft_eval() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() + >>> result_offset = op.outputs.offset() """ + def __init__(self, op: Operator): super().__init__(fft_eval._spec().outputs, op) - self._field = Output(fft_eval._spec().output_pin(0), 0, op) + self._field = Output(fft_eval._spec().output_pin(0), 0, op) self._outputs.append(self._field) - self._offset = Output(fft_eval._spec().output_pin(2), 2, op) + self._offset = Output(fft_eval._spec().output_pin(2), 2, op) self._outputs.append(self._offset) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.fft_eval() >>> # Connect inputs : op.inputs. ... 
- >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field @property def offset(self): """Allows to get offset output of the operator - Returns ---------- - my_offset : Field, + my_offset : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.fft_eval() >>> # Connect inputs : op.inputs. ... - >>> result_offset = op.outputs.offset() - """ + >>> result_offset = op.outputs.offset() + """ # noqa: E501 return self._offset - diff --git a/ansys/dpf/core/operators/math/fft_gradient_eval.py b/ansys/dpf/core/operators/math/fft_gradient_eval.py index 200382a3991..075eeeb5375 100644 --- a/ansys/dpf/core/operators/math/fft_gradient_eval.py +++ b/ansys/dpf/core/operators/math/fft_gradient_eval.py @@ -1,72 +1,126 @@ """ fft_gradient_eval -================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Math plugin, from "math" category -""" class fft_gradient_eval(Operator): - """Evaluate min max based on the fast fourier transform at a given field, using gradient method for adaptative time step. 
- - available inputs: - - fields_container (FieldsContainer) - - time_scoping (Scoping) (optional) - - fs_ratio (int) (optional) - - available outputs: - - coefficients (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.fft_gradient_eval() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_fs_ratio = int() - >>> op.inputs.fs_ratio.connect(my_fs_ratio) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.fft_gradient_eval(fields_container=my_fields_container,time_scoping=my_time_scoping,fs_ratio=my_fs_ratio) - - >>> # Get output data - >>> result_coefficients = op.outputs.coefficients()""" - def __init__(self, fields_container=None, time_scoping=None, fs_ratio=None, config=None, server=None): - super().__init__(name="fft_eval_gr", config = config, server = server) + """Evaluate min max based on the fast fourier transform at a given field, + using gradient method for adaptative time step. 
+ + Parameters + ---------- + fields_container : FieldsContainer + time_scoping : Scoping, optional + If specified only the results at these set + ids are used + fs_ratio : int, optional + Default value = 20 + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.fft_gradient_eval() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_fs_ratio = int() + >>> op.inputs.fs_ratio.connect(my_fs_ratio) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.fft_gradient_eval( + ... fields_container=my_fields_container, + ... time_scoping=my_time_scoping, + ... fs_ratio=my_fs_ratio, + ... ) + + >>> # Get output data + >>> result_coefficients = op.outputs.coefficients() + """ + + def __init__( + self, + fields_container=None, + time_scoping=None, + fs_ratio=None, + config=None, + server=None, + ): + super().__init__(name="fft_eval_gr", config=config, server=server) self._inputs = InputsFftGradientEval(self) self._outputs = OutputsFftGradientEval(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if fs_ratio !=None: + if fs_ratio is not None: self.inputs.fs_ratio.connect(fs_ratio) @staticmethod def _spec(): - spec = Specification(description="""Evaluate min max based on the fast fourier transform at a given field, using gradient method for adaptative time step.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "time_scoping", type_names=["scoping"], optional=True, document="""if 
specified only the results at these set ids are used"""), - 2 : PinSpecification(name = "fs_ratio", type_names=["int32"], optional=True, document="""default value = 20""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "coefficients", type_names=["fields_container"], optional=False, document="""""")}) + description = """Evaluate min max based on the fast fourier transform at a given field, + using gradient method for adaptative time step.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="time_scoping", + type_names=["scoping"], + optional=True, + document="""If specified only the results at these set + ids are used""", + ), + 2: PinSpecification( + name="fs_ratio", + type_names=["int32"], + optional=True, + document="""Default value = 20""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="coefficients", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "fft_eval_gr") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="fft_eval_gr", server=server) @property def inputs(self): @@ -74,143 +128,138 @@ def inputs(self): Returns -------- - inputs : InputsFftGradientEval + inputs : InputsFftGradientEval """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsFftGradientEval + outputs : OutputsFftGradientEval """ return super().outputs -#internal name: fft_eval_gr -#scripting name: fft_gradient_eval class InputsFftGradientEval(_Inputs): - """Intermediate class used to connect user inputs to fft_gradient_eval operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.fft_gradient_eval() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_fs_ratio = int() - >>> op.inputs.fs_ratio.connect(my_fs_ratio) + """Intermediate class used to connect user inputs to + fft_gradient_eval operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.fft_gradient_eval() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_fs_ratio = int() + >>> op.inputs.fs_ratio.connect(my_fs_ratio) """ + def __init__(self, op: Operator): super().__init__(fft_gradient_eval._spec().inputs, op) - self._fields_container = Input(fft_gradient_eval._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + fft_gradient_eval._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._time_scoping = Input(fft_gradient_eval._spec().input_pin(1), 1, op, -1) + self._time_scoping = Input(fft_gradient_eval._spec().input_pin(1), 1, op, -1) self._inputs.append(self._time_scoping) - self._fs_ratio = Input(fft_gradient_eval._spec().input_pin(2), 2, op, -1) + self._fs_ratio = Input(fft_gradient_eval._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fs_ratio) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.fft_gradient_eval() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: if specified only the results at these set ids are used + If specified only the results at these set + ids are used Parameters ---------- - my_time_scoping : Scoping, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.fft_gradient_eval() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def fs_ratio(self): - """Allows to connect fs_ratio input to the operator + """Allows to connect fs_ratio input to the operator. - - pindoc: default value = 20 + Default value = 20 Parameters ---------- - my_fs_ratio : int, + my_fs_ratio : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.fft_gradient_eval() >>> op.inputs.fs_ratio.connect(my_fs_ratio) - >>> #or + >>> # or >>> op.inputs.fs_ratio(my_fs_ratio) - """ return self._fs_ratio + class OutputsFftGradientEval(_Outputs): - """Intermediate class used to get outputs from fft_gradient_eval operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.fft_gradient_eval() - >>> # Connect inputs : op.inputs. ... - >>> result_coefficients = op.outputs.coefficients() + """Intermediate class used to get outputs from + fft_gradient_eval operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.fft_gradient_eval() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_coefficients = op.outputs.coefficients() """ + def __init__(self, op: Operator): super().__init__(fft_gradient_eval._spec().outputs, op) - self._coefficients = Output(fft_gradient_eval._spec().output_pin(0), 0, op) + self._coefficients = Output(fft_gradient_eval._spec().output_pin(0), 0, op) self._outputs.append(self._coefficients) @property def coefficients(self): """Allows to get coefficients output of the operator - Returns ---------- - my_coefficients : FieldsContainer, + my_coefficients : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.fft_gradient_eval() >>> # Connect inputs : op.inputs. ... - >>> result_coefficients = op.outputs.coefficients() - """ + >>> result_coefficients = op.outputs.coefficients() + """ # noqa: E501 return self._coefficients - diff --git a/ansys/dpf/core/operators/math/fft_multi_harmonic_minmax.py b/ansys/dpf/core/operators/math/fft_multi_harmonic_minmax.py index 12b527c65e7..8d8da9000cf 100644 --- a/ansys/dpf/core/operators/math/fft_multi_harmonic_minmax.py +++ b/ansys/dpf/core/operators/math/fft_multi_harmonic_minmax.py @@ -1,90 +1,176 @@ """ fft_multi_harmonic_minmax -========================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Math plugin, from "math" category -""" class fft_multi_harmonic_minmax(Operator): - """Evaluate min max fields on multi harmonic solution. 
min and max fields are calculated based on evaluating a fft wrt rpms and using the gradient method for adaptive time steping - - available inputs: - - fields_container (FieldsContainer) - - rpm_scoping (Scoping) (optional) - - fs_ratio (int) (optional) - - num_subdivisions (int) (optional) - - max_num_subdivisions (int) (optional) - - available outputs: - - field_min (FieldsContainer) - - field_max (FieldsContainer) - - all_fields (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.fft_multi_harmonic_minmax() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_rpm_scoping = dpf.Scoping() - >>> op.inputs.rpm_scoping.connect(my_rpm_scoping) - >>> my_fs_ratio = int() - >>> op.inputs.fs_ratio.connect(my_fs_ratio) - >>> my_num_subdivisions = int() - >>> op.inputs.num_subdivisions.connect(my_num_subdivisions) - >>> my_max_num_subdivisions = int() - >>> op.inputs.max_num_subdivisions.connect(my_max_num_subdivisions) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.fft_multi_harmonic_minmax(fields_container=my_fields_container,rpm_scoping=my_rpm_scoping,fs_ratio=my_fs_ratio,num_subdivisions=my_num_subdivisions,max_num_subdivisions=my_max_num_subdivisions) - - >>> # Get output data - >>> result_field_min = op.outputs.field_min() - >>> result_field_max = op.outputs.field_max() - >>> result_all_fields = op.outputs.all_fields()""" - def __init__(self, fields_container=None, rpm_scoping=None, fs_ratio=None, num_subdivisions=None, max_num_subdivisions=None, config=None, server=None): - super().__init__(name="fft_multi_harmonic_minmax", config = config, server = server) + """Evaluate min max fields on multi harmonic solution. 
min and max fields + are calculated based on evaluating a fft wrt rpms and using the + gradient method for adaptive time steping + + Parameters + ---------- + fields_container : FieldsContainer + rpm_scoping : Scoping, optional + Rpm scoping, by default the fft is evaluted + using all the rpms + fs_ratio : int, optional + Field or fields container with only one field + is expected + num_subdivisions : int, optional + Connect number subdivisions, used for uniform + discretization + max_num_subdivisions : int, optional + Connect max number subdivisions, used to + avoid huge number of sudivisions + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.fft_multi_harmonic_minmax() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_rpm_scoping = dpf.Scoping() + >>> op.inputs.rpm_scoping.connect(my_rpm_scoping) + >>> my_fs_ratio = int() + >>> op.inputs.fs_ratio.connect(my_fs_ratio) + >>> my_num_subdivisions = int() + >>> op.inputs.num_subdivisions.connect(my_num_subdivisions) + >>> my_max_num_subdivisions = int() + >>> op.inputs.max_num_subdivisions.connect(my_max_num_subdivisions) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.fft_multi_harmonic_minmax( + ... fields_container=my_fields_container, + ... rpm_scoping=my_rpm_scoping, + ... fs_ratio=my_fs_ratio, + ... num_subdivisions=my_num_subdivisions, + ... max_num_subdivisions=my_max_num_subdivisions, + ... 
) + + >>> # Get output data + >>> result_field_min = op.outputs.field_min() + >>> result_field_max = op.outputs.field_max() + >>> result_all_fields = op.outputs.all_fields() + """ + + def __init__( + self, + fields_container=None, + rpm_scoping=None, + fs_ratio=None, + num_subdivisions=None, + max_num_subdivisions=None, + config=None, + server=None, + ): + super().__init__(name="fft_multi_harmonic_minmax", config=config, server=server) self._inputs = InputsFftMultiHarmonicMinmax(self) self._outputs = OutputsFftMultiHarmonicMinmax(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if rpm_scoping !=None: + if rpm_scoping is not None: self.inputs.rpm_scoping.connect(rpm_scoping) - if fs_ratio !=None: + if fs_ratio is not None: self.inputs.fs_ratio.connect(fs_ratio) - if num_subdivisions !=None: + if num_subdivisions is not None: self.inputs.num_subdivisions.connect(num_subdivisions) - if max_num_subdivisions !=None: + if max_num_subdivisions is not None: self.inputs.max_num_subdivisions.connect(max_num_subdivisions) @staticmethod def _spec(): - spec = Specification(description="""Evaluate min max fields on multi harmonic solution. 
min and max fields are calculated based on evaluating a fft wrt rpms and using the gradient method for adaptive time steping""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "rpm_scoping", type_names=["scoping"], optional=True, document="""rpm scoping, by default the fft is evaluted using all the rpms"""), - 2 : PinSpecification(name = "fs_ratio", type_names=["int32"], optional=True, document="""field or fields container with only one field is expected"""), - 3 : PinSpecification(name = "num_subdivisions", type_names=["int32"], optional=True, document="""connect number subdivisions, used for uniform discretization"""), - 4 : PinSpecification(name = "max_num_subdivisions", type_names=["int32"], optional=True, document="""connect max number subdivisions, used to avoid huge number of sudivisions""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field_min", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "field_max", type_names=["fields_container"], optional=False, document=""""""), - 2 : PinSpecification(name = "all_fields", type_names=["fields_container"], optional=False, document="""""")}) + description = """Evaluate min max fields on multi harmonic solution. 
min and max fields + are calculated based on evaluating a fft wrt rpms and + using the gradient method for adaptive time steping""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="rpm_scoping", + type_names=["scoping"], + optional=True, + document="""Rpm scoping, by default the fft is evaluted + using all the rpms""", + ), + 2: PinSpecification( + name="fs_ratio", + type_names=["int32"], + optional=True, + document="""Field or fields container with only one field + is expected""", + ), + 3: PinSpecification( + name="num_subdivisions", + type_names=["int32"], + optional=True, + document="""Connect number subdivisions, used for uniform + discretization""", + ), + 4: PinSpecification( + name="max_num_subdivisions", + type_names=["int32"], + optional=True, + document="""Connect max number subdivisions, used to + avoid huge number of sudivisions""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field_min", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="field_max", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="all_fields", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "fft_multi_harmonic_minmax") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. 
When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="fft_multi_harmonic_minmax", server=server) @property def inputs(self): @@ -92,239 +178,239 @@ def inputs(self): Returns -------- - inputs : InputsFftMultiHarmonicMinmax + inputs : InputsFftMultiHarmonicMinmax """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsFftMultiHarmonicMinmax + outputs : OutputsFftMultiHarmonicMinmax """ return super().outputs -#internal name: fft_multi_harmonic_minmax -#scripting name: fft_multi_harmonic_minmax class InputsFftMultiHarmonicMinmax(_Inputs): - """Intermediate class used to connect user inputs to fft_multi_harmonic_minmax operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.fft_multi_harmonic_minmax() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_rpm_scoping = dpf.Scoping() - >>> op.inputs.rpm_scoping.connect(my_rpm_scoping) - >>> my_fs_ratio = int() - >>> op.inputs.fs_ratio.connect(my_fs_ratio) - >>> my_num_subdivisions = int() - >>> op.inputs.num_subdivisions.connect(my_num_subdivisions) - >>> my_max_num_subdivisions = int() - >>> op.inputs.max_num_subdivisions.connect(my_max_num_subdivisions) + """Intermediate class used to connect user inputs to + fft_multi_harmonic_minmax operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.fft_multi_harmonic_minmax() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_rpm_scoping = dpf.Scoping() + >>> op.inputs.rpm_scoping.connect(my_rpm_scoping) + >>> my_fs_ratio = int() + >>> op.inputs.fs_ratio.connect(my_fs_ratio) + >>> my_num_subdivisions = int() + >>> op.inputs.num_subdivisions.connect(my_num_subdivisions) + >>> my_max_num_subdivisions = int() + >>> op.inputs.max_num_subdivisions.connect(my_max_num_subdivisions) """ + def __init__(self, op: Operator): super().__init__(fft_multi_harmonic_minmax._spec().inputs, op) - self._fields_container = Input(fft_multi_harmonic_minmax._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + fft_multi_harmonic_minmax._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._rpm_scoping = Input(fft_multi_harmonic_minmax._spec().input_pin(1), 1, op, -1) + self._rpm_scoping = Input( + fft_multi_harmonic_minmax._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._rpm_scoping) - self._fs_ratio = Input(fft_multi_harmonic_minmax._spec().input_pin(2), 2, op, -1) + self._fs_ratio = Input( + fft_multi_harmonic_minmax._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fs_ratio) - self._num_subdivisions = Input(fft_multi_harmonic_minmax._spec().input_pin(3), 3, op, -1) + self._num_subdivisions = Input( + fft_multi_harmonic_minmax._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._num_subdivisions) - self._max_num_subdivisions = Input(fft_multi_harmonic_minmax._spec().input_pin(4), 4, op, -1) + self._max_num_subdivisions = Input( + fft_multi_harmonic_minmax._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._max_num_subdivisions) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the 
operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.fft_multi_harmonic_minmax() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def rpm_scoping(self): - """Allows to connect rpm_scoping input to the operator + """Allows to connect rpm_scoping input to the operator. - - pindoc: rpm scoping, by default the fft is evaluted using all the rpms + Rpm scoping, by default the fft is evaluted + using all the rpms Parameters ---------- - my_rpm_scoping : Scoping, + my_rpm_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.fft_multi_harmonic_minmax() >>> op.inputs.rpm_scoping.connect(my_rpm_scoping) - >>> #or + >>> # or >>> op.inputs.rpm_scoping(my_rpm_scoping) - """ return self._rpm_scoping @property def fs_ratio(self): - """Allows to connect fs_ratio input to the operator + """Allows to connect fs_ratio input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fs_ratio : int, + my_fs_ratio : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.fft_multi_harmonic_minmax() >>> op.inputs.fs_ratio.connect(my_fs_ratio) - >>> #or + >>> # or >>> op.inputs.fs_ratio(my_fs_ratio) - """ return self._fs_ratio @property def num_subdivisions(self): - """Allows to connect num_subdivisions input to the operator + """Allows to connect num_subdivisions input to the operator. 
- - pindoc: connect number subdivisions, used for uniform discretization + Connect number subdivisions, used for uniform + discretization Parameters ---------- - my_num_subdivisions : int, + my_num_subdivisions : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.fft_multi_harmonic_minmax() >>> op.inputs.num_subdivisions.connect(my_num_subdivisions) - >>> #or + >>> # or >>> op.inputs.num_subdivisions(my_num_subdivisions) - """ return self._num_subdivisions @property def max_num_subdivisions(self): - """Allows to connect max_num_subdivisions input to the operator + """Allows to connect max_num_subdivisions input to the operator. - - pindoc: connect max number subdivisions, used to avoid huge number of sudivisions + Connect max number subdivisions, used to + avoid huge number of sudivisions Parameters ---------- - my_max_num_subdivisions : int, + my_max_num_subdivisions : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.fft_multi_harmonic_minmax() >>> op.inputs.max_num_subdivisions.connect(my_max_num_subdivisions) - >>> #or + >>> # or >>> op.inputs.max_num_subdivisions(my_max_num_subdivisions) - """ return self._max_num_subdivisions + class OutputsFftMultiHarmonicMinmax(_Outputs): - """Intermediate class used to get outputs from fft_multi_harmonic_minmax operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.fft_multi_harmonic_minmax() - >>> # Connect inputs : op.inputs. ... - >>> result_field_min = op.outputs.field_min() - >>> result_field_max = op.outputs.field_max() - >>> result_all_fields = op.outputs.all_fields() + """Intermediate class used to get outputs from + fft_multi_harmonic_minmax operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.fft_multi_harmonic_minmax() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field_min = op.outputs.field_min() + >>> result_field_max = op.outputs.field_max() + >>> result_all_fields = op.outputs.all_fields() """ + def __init__(self, op: Operator): super().__init__(fft_multi_harmonic_minmax._spec().outputs, op) - self._field_min = Output(fft_multi_harmonic_minmax._spec().output_pin(0), 0, op) + self._field_min = Output(fft_multi_harmonic_minmax._spec().output_pin(0), 0, op) self._outputs.append(self._field_min) - self._field_max = Output(fft_multi_harmonic_minmax._spec().output_pin(1), 1, op) + self._field_max = Output(fft_multi_harmonic_minmax._spec().output_pin(1), 1, op) self._outputs.append(self._field_max) - self._all_fields = Output(fft_multi_harmonic_minmax._spec().output_pin(2), 2, op) + self._all_fields = Output( + fft_multi_harmonic_minmax._spec().output_pin(2), 2, op + ) self._outputs.append(self._all_fields) @property def field_min(self): """Allows to get field_min output of the operator - Returns ---------- - my_field_min : FieldsContainer, + my_field_min : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.fft_multi_harmonic_minmax() >>> # Connect inputs : op.inputs. ... - >>> result_field_min = op.outputs.field_min() - """ + >>> result_field_min = op.outputs.field_min() + """ # noqa: E501 return self._field_min @property def field_max(self): """Allows to get field_max output of the operator - Returns ---------- - my_field_max : FieldsContainer, + my_field_max : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.fft_multi_harmonic_minmax() >>> # Connect inputs : op.inputs. ... 
- >>> result_field_max = op.outputs.field_max() - """ + >>> result_field_max = op.outputs.field_max() + """ # noqa: E501 return self._field_max @property def all_fields(self): """Allows to get all_fields output of the operator - Returns ---------- - my_all_fields : FieldsContainer, + my_all_fields : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.fft_multi_harmonic_minmax() >>> # Connect inputs : op.inputs. ... - >>> result_all_fields = op.outputs.all_fields() - """ + >>> result_all_fields = op.outputs.all_fields() + """ # noqa: E501 return self._all_fields - diff --git a/ansys/dpf/core/operators/math/generalized_inner_product.py b/ansys/dpf/core/operators/math/generalized_inner_product.py index c48f1efb1e0..e7d1bb5ef46 100644 --- a/ansys/dpf/core/operators/math/generalized_inner_product.py +++ b/ansys/dpf/core/operators/math/generalized_inner_product.py @@ -1,66 +1,119 @@ """ generalized_inner_product -========================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class generalized_inner_product(Operator): - """Computes a general notion of inner product between two fields of possibly different dimensionality. 
- - available inputs: - - fieldA (Field, FieldsContainer) - - fieldB (Field, FieldsContainer) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.generalized_inner_product() - - >>> # Make input connections - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.generalized_inner_product(fieldA=my_fieldA,fieldB=my_fieldB) + """Computes a general notion of inner product between two fields of + possibly different dimensionality. + + Parameters + ---------- + fieldA : Field or FieldsContainer or float + Field or fields container with only one field + is expected + fieldB : Field or FieldsContainer or float + Field or fields container with only one field + is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.generalized_inner_product() + + >>> # Make input connections + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.generalized_inner_product( + ... fieldA=my_fieldA, + ... fieldB=my_fieldB, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, fieldA=None, fieldB=None, config=None, server=None): - super().__init__(name="generalized_inner_product", config = config, server = server) + super().__init__(name="generalized_inner_product", config=config, server=server) self._inputs = InputsGeneralizedInnerProduct(self) self._outputs = OutputsGeneralizedInnerProduct(self) - if fieldA !=None: + if fieldA is not None: self.inputs.fieldA.connect(fieldA) - if fieldB !=None: + if fieldB is not None: self.inputs.fieldB.connect(fieldB) @staticmethod def _spec(): - spec = Specification(description="""Computes a general notion of inner product between two fields of possibly different dimensionality.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fieldA", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "fieldB", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Computes a general notion of inner product between two fields of + possibly different dimensionality.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fieldA", + type_names=[ + "field", + "fields_container", + "double", + "vector", + ], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="fieldB", + type_names=[ + "field", + "fields_container", + "double", + "vector", + ], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + 
name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "generalized_inner_product") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="generalized_inner_product", server=server) @property def inputs(self): @@ -68,119 +121,115 @@ def inputs(self): Returns -------- - inputs : InputsGeneralizedInnerProduct + inputs : InputsGeneralizedInnerProduct """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsGeneralizedInnerProduct + outputs : OutputsGeneralizedInnerProduct """ return super().outputs -#internal name: generalized_inner_product -#scripting name: generalized_inner_product class InputsGeneralizedInnerProduct(_Inputs): - """Intermediate class used to connect user inputs to generalized_inner_product operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.generalized_inner_product() - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) + """Intermediate class used to connect user inputs to + generalized_inner_product operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.generalized_inner_product() + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) """ + def __init__(self, op: Operator): super().__init__(generalized_inner_product._spec().inputs, op) - self._fieldA = Input(generalized_inner_product._spec().input_pin(0), 0, op, -1) + self._fieldA = Input(generalized_inner_product._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fieldA) - self._fieldB = Input(generalized_inner_product._spec().input_pin(1), 1, op, -1) + self._fieldB = Input(generalized_inner_product._spec().input_pin(1), 1, op, -1) self._inputs.append(self._fieldB) @property def fieldA(self): - """Allows to connect fieldA input to the operator + """Allows to connect fieldA input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldA : Field, FieldsContainer, + my_fieldA : Field or FieldsContainer or float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.generalized_inner_product() >>> op.inputs.fieldA.connect(my_fieldA) - >>> #or + >>> # or >>> op.inputs.fieldA(my_fieldA) - """ return self._fieldA @property def fieldB(self): - """Allows to connect fieldB input to the operator + """Allows to connect fieldB input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldB : Field, FieldsContainer, + my_fieldB : Field or FieldsContainer or float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.generalized_inner_product() >>> op.inputs.fieldB.connect(my_fieldB) - >>> #or + >>> # or >>> op.inputs.fieldB(my_fieldB) - """ return self._fieldB + class OutputsGeneralizedInnerProduct(_Outputs): - """Intermediate class used to get outputs from generalized_inner_product operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.generalized_inner_product() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + generalized_inner_product operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.generalized_inner_product() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(generalized_inner_product._spec().outputs, op) - self._field = Output(generalized_inner_product._spec().output_pin(0), 0, op) + self._field = Output(generalized_inner_product._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.generalized_inner_product() >>> # Connect inputs : op.inputs. ... 
- >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/generalized_inner_product_fc.py b/ansys/dpf/core/operators/math/generalized_inner_product_fc.py index 1967aab06f3..5fed88798e3 100644 --- a/ansys/dpf/core/operators/math/generalized_inner_product_fc.py +++ b/ansys/dpf/core/operators/math/generalized_inner_product_fc.py @@ -1,66 +1,129 @@ """ generalized_inner_product_fc -============================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class generalized_inner_product_fc(Operator): - """Computes a general notion of inner product between two fields of possibly different dimensionality. 
- - available inputs: - - field_or_fields_container_A (FieldsContainer) - - field_or_fields_container_B (FieldsContainer) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.generalized_inner_product_fc() - - >>> # Make input connections - >>> my_field_or_fields_container_A = dpf.FieldsContainer() - >>> op.inputs.field_or_fields_container_A.connect(my_field_or_fields_container_A) - >>> my_field_or_fields_container_B = dpf.FieldsContainer() - >>> op.inputs.field_or_fields_container_B.connect(my_field_or_fields_container_B) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.generalized_inner_product_fc(field_or_fields_container_A=my_field_or_fields_container_A,field_or_fields_container_B=my_field_or_fields_container_B) + """Computes a general notion of inner product between two fields of + possibly different dimensionality. + + Parameters + ---------- + field_or_fields_container_A : Field or FieldsContainer or float + Field or fields container with only one field + is expected + field_or_fields_container_B : Field or FieldsContainer or float + Field or fields container with only one field + is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.generalized_inner_product_fc() + + >>> # Make input connections + >>> my_field_or_fields_container_A = dpf.Field() + >>> op.inputs.field_or_fields_container_A.connect(my_field_or_fields_container_A) + >>> my_field_or_fields_container_B = dpf.Field() + >>> op.inputs.field_or_fields_container_B.connect(my_field_or_fields_container_B) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.generalized_inner_product_fc( + ... field_or_fields_container_A=my_field_or_fields_container_A, + ... 
field_or_fields_container_B=my_field_or_fields_container_B, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, field_or_fields_container_A=None, field_or_fields_container_B=None, config=None, server=None): - super().__init__(name="generalized_inner_product_fc", config = config, server = server) + def __init__( + self, + field_or_fields_container_A=None, + field_or_fields_container_B=None, + config=None, + server=None, + ): + super().__init__( + name="generalized_inner_product_fc", config=config, server=server + ) self._inputs = InputsGeneralizedInnerProductFc(self) self._outputs = OutputsGeneralizedInnerProductFc(self) - if field_or_fields_container_A !=None: + if field_or_fields_container_A is not None: self.inputs.field_or_fields_container_A.connect(field_or_fields_container_A) - if field_or_fields_container_B !=None: + if field_or_fields_container_B is not None: self.inputs.field_or_fields_container_B.connect(field_or_fields_container_B) @staticmethod def _spec(): - spec = Specification(description="""Computes a general notion of inner product between two fields of possibly different dimensionality.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field_or_fields_container_A", type_names=["fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "field_or_fields_container_B", type_names=["fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Computes a general notion of inner product between two fields of + possibly different dimensionality.""" + spec = Specification( + description=description, + 
map_input_pin_spec={ + 0: PinSpecification( + name="field_or_fields_container_A", + type_names=[ + "field", + "fields_container", + "double", + "vector", + ], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="field_or_fields_container_B", + type_names=[ + "field", + "fields_container", + "double", + "vector", + ], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "generalized_inner_product_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config( + name="generalized_inner_product_fc", server=server + ) @property def inputs(self): @@ -68,119 +131,121 @@ def inputs(self): Returns -------- - inputs : InputsGeneralizedInnerProductFc + inputs : InputsGeneralizedInnerProductFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsGeneralizedInnerProductFc + outputs : OutputsGeneralizedInnerProductFc """ return super().outputs -#internal name: generalized_inner_product_fc -#scripting name: generalized_inner_product_fc class InputsGeneralizedInnerProductFc(_Inputs): - """Intermediate class used to connect user inputs to generalized_inner_product_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.generalized_inner_product_fc() - >>> my_field_or_fields_container_A = dpf.FieldsContainer() - >>> op.inputs.field_or_fields_container_A.connect(my_field_or_fields_container_A) - >>> my_field_or_fields_container_B = dpf.FieldsContainer() - >>> op.inputs.field_or_fields_container_B.connect(my_field_or_fields_container_B) + """Intermediate class used to connect user inputs to + generalized_inner_product_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.generalized_inner_product_fc() + >>> my_field_or_fields_container_A = dpf.Field() + >>> op.inputs.field_or_fields_container_A.connect(my_field_or_fields_container_A) + >>> my_field_or_fields_container_B = dpf.Field() + >>> op.inputs.field_or_fields_container_B.connect(my_field_or_fields_container_B) """ + def __init__(self, op: Operator): super().__init__(generalized_inner_product_fc._spec().inputs, op) - self._field_or_fields_container_A = Input(generalized_inner_product_fc._spec().input_pin(0), 0, op, -1) + self._field_or_fields_container_A = Input( + generalized_inner_product_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field_or_fields_container_A) - self._field_or_fields_container_B = Input(generalized_inner_product_fc._spec().input_pin(1), 1, op, -1) + self._field_or_fields_container_B = Input( + generalized_inner_product_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._field_or_fields_container_B) @property def field_or_fields_container_A(self): - """Allows to connect field_or_fields_container_A input to the operator + """Allows to connect field_or_fields_container_A input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field_or_fields_container_A : FieldsContainer, + my_field_or_fields_container_A : Field or FieldsContainer or float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.generalized_inner_product_fc() >>> op.inputs.field_or_fields_container_A.connect(my_field_or_fields_container_A) - >>> #or + >>> # or >>> op.inputs.field_or_fields_container_A(my_field_or_fields_container_A) - """ return self._field_or_fields_container_A @property def field_or_fields_container_B(self): - """Allows to connect field_or_fields_container_B input to the operator + """Allows to connect field_or_fields_container_B input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field_or_fields_container_B : FieldsContainer, + my_field_or_fields_container_B : Field or FieldsContainer or float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.generalized_inner_product_fc() >>> op.inputs.field_or_fields_container_B.connect(my_field_or_fields_container_B) - >>> #or + >>> # or >>> op.inputs.field_or_fields_container_B(my_field_or_fields_container_B) - """ return self._field_or_fields_container_B + class OutputsGeneralizedInnerProductFc(_Outputs): - """Intermediate class used to get outputs from generalized_inner_product_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.generalized_inner_product_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + generalized_inner_product_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.generalized_inner_product_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(generalized_inner_product_fc._spec().outputs, op) - self._fields_container = Output(generalized_inner_product_fc._spec().output_pin(0), 0, op) + self._fields_container = Output( + generalized_inner_product_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.generalized_inner_product_fc() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/img_part.py b/ansys/dpf/core/operators/math/img_part.py index 7c6064c813c..7afe4ecf6af 100644 --- a/ansys/dpf/core/operators/math/img_part.py +++ b/ansys/dpf/core/operators/math/img_part.py @@ -1,60 +1,91 @@ """ img_part -======== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class img_part(Operator): - """Extracts element-wise imaginary part of field containers containing complex fields. 
+ """Extracts element-wise imaginary part of field containers containing + complex fields. + + Parameters + ---------- + fields_container : FieldsContainer - available inputs: - - fields_container (FieldsContainer) - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.img_part() - >>> # Instantiate operator - >>> op = dpf.operators.math.img_part() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.img_part( + ... fields_container=my_fields_container, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.img_part(fields_container=my_fields_container) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="img_part", config = config, server = server) + super().__init__(name="img_part", config=config, server=server) self._inputs = InputsImgPart(self) self._outputs = OutputsImgPart(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Extracts element-wise imaginary part of field containers containing complex fields.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}, - 
map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Extracts element-wise imaginary part of field containers containing + complex fields.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "img_part") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="img_part", server=server) @property def inputs(self): @@ -62,91 +93,87 @@ def inputs(self): Returns -------- - inputs : InputsImgPart + inputs : InputsImgPart """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsImgPart + outputs : OutputsImgPart """ return super().outputs -#internal name: img_part -#scripting name: img_part class InputsImgPart(_Inputs): - """Intermediate class used to connect user inputs to img_part operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.img_part() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + img_part operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.img_part() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(img_part._spec().inputs, op) - self._fields_container = Input(img_part._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(img_part._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.img_part() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsImgPart(_Outputs): - """Intermediate class used to get outputs from img_part operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.img_part() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + img_part operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.img_part() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(img_part._spec().outputs, op) - self._fields_container = Output(img_part._spec().output_pin(0), 0, op) + self._fields_container = Output(img_part._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.img_part() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/invert.py b/ansys/dpf/core/operators/math/invert.py index 7bcb940ff44..f525b9025f4 100644 --- a/ansys/dpf/core/operators/math/invert.py +++ b/ansys/dpf/core/operators/math/invert.py @@ -1,60 +1,94 @@ """ invert -====== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class invert(Operator): """Compute the element-wise, component-wise, inverse of a field (1./x) - available inputs: - - field (Field, FieldsContainer) + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + - available outputs: - - field (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.invert() - >>> # Instantiate operator - >>> op = dpf.operators.math.invert() + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.invert( + ... field=my_field, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.invert(field=my_field) + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, field=None, config=None, server=None): - super().__init__(name="invert", config = config, server = server) + super().__init__(name="invert", config=config, server=server) self._inputs = InputsInvert(self) self._outputs = OutputsInvert(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) @staticmethod def _spec(): - spec = Specification(description="""Compute the element-wise, component-wise, inverse of a field (1./x)""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = ( + """Compute the element-wise, component-wise, inverse of a field (1./x)""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "invert") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="invert", server=server) @property def inputs(self): @@ -62,93 +96,90 @@ def inputs(self): Returns -------- - inputs : InputsInvert + inputs : InputsInvert """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsInvert + outputs : OutputsInvert """ return super().outputs -#internal name: invert -#scripting name: invert class InputsInvert(_Inputs): - """Intermediate class used to connect user inputs to invert operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.invert() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + """Intermediate class used to connect user inputs to + invert operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.invert() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) """ + def __init__(self, op: Operator): super().__init__(invert._spec().inputs, op) - self._field = Input(invert._spec().input_pin(0), 0, op, -1) + self._field = Input(invert._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.invert() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field + class OutputsInvert(_Outputs): - """Intermediate class used to get outputs from invert operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.invert() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + invert operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.invert() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(invert._spec().outputs, op) - self._field = Output(invert._spec().output_pin(0), 0, op) + self._field = Output(invert._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.invert() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/invert_fc.py b/ansys/dpf/core/operators/math/invert_fc.py index b0b62bb6b13..9c444b2033b 100644 --- a/ansys/dpf/core/operators/math/invert_fc.py +++ b/ansys/dpf/core/operators/math/invert_fc.py @@ -1,60 +1,94 @@ """ invert_fc -========= +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class invert_fc(Operator): """Compute the element-wise, component-wise, inverse of a field (1./x) - available inputs: - - fields_container (FieldsContainer) + Parameters + ---------- + fields_container : FieldsContainer + Field or fields container with only one field + is expected + - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.invert_fc() - >>> # Instantiate operator - >>> op = dpf.operators.math.invert_fc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.invert_fc( + ... fields_container=my_fields_container, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.invert_fc(fields_container=my_fields_container) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="invert_fc", config = config, server = server) + super().__init__(name="invert_fc", config=config, server=server) self._inputs = InputsInvertFc(self) self._outputs = OutputsInvertFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Compute the element-wise, component-wise, inverse of a field (1./x)""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = ( + """Compute the element-wise, component-wise, inverse of a field (1./x)""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "invert_fc") + def default_config(server=None): + """Returns the default config of the operator. 
+ + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="invert_fc", server=server) @property def inputs(self): @@ -62,93 +96,90 @@ def inputs(self): Returns -------- - inputs : InputsInvertFc + inputs : InputsInvertFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsInvertFc + outputs : OutputsInvertFc """ return super().outputs -#internal name: invert_fc -#scripting name: invert_fc class InputsInvertFc(_Inputs): - """Intermediate class used to connect user inputs to invert_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.invert_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + invert_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.invert_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(invert_fc._spec().inputs, op) - self._fields_container = Input(invert_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(invert_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.invert_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsInvertFc(_Outputs): - """Intermediate class used to get outputs from invert_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.invert_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + invert_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.invert_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(invert_fc._spec().outputs, op) - self._fields_container = Output(invert_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(invert_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.invert_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/kronecker_prod.py b/ansys/dpf/core/operators/math/kronecker_prod.py index 4d5a9bc9298..d4282641315 100644 --- a/ansys/dpf/core/operators/math/kronecker_prod.py +++ b/ansys/dpf/core/operators/math/kronecker_prod.py @@ -1,66 +1,109 @@ """ kronecker_prod -============== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class kronecker_prod(Operator): """Computes element-wise Kronecker product between two tensor fields. 
- available inputs: - - fieldA (Field, FieldsContainer) - - fieldB (Field, FieldsContainer) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.kronecker_prod() - - >>> # Make input connections - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.kronecker_prod(fieldA=my_fieldA,fieldB=my_fieldB) + Parameters + ---------- + fieldA : Field or FieldsContainer + Field or fields container with only one field + is expected + fieldB : Field or FieldsContainer + Field or fields container with only one field + is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.kronecker_prod() + + >>> # Make input connections + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.kronecker_prod( + ... fieldA=my_fieldA, + ... fieldB=my_fieldB, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, fieldA=None, fieldB=None, config=None, server=None): - super().__init__(name="kronecker_prod", config = config, server = server) + super().__init__(name="kronecker_prod", config=config, server=server) self._inputs = InputsKroneckerProd(self) self._outputs = OutputsKroneckerProd(self) - if fieldA !=None: + if fieldA is not None: self.inputs.fieldA.connect(fieldA) - if fieldB !=None: + if fieldB is not None: self.inputs.fieldB.connect(fieldB) @staticmethod def _spec(): - spec = Specification(description="""Computes element-wise Kronecker product between two tensor fields.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fieldA", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "fieldB", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = ( + """Computes element-wise Kronecker product between two tensor fields.""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fieldA", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="fieldB", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return 
Operator.default_config(name = "kronecker_prod") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="kronecker_prod", server=server) @property def inputs(self): @@ -68,119 +111,115 @@ def inputs(self): Returns -------- - inputs : InputsKroneckerProd + inputs : InputsKroneckerProd """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsKroneckerProd + outputs : OutputsKroneckerProd """ return super().outputs -#internal name: kronecker_prod -#scripting name: kronecker_prod class InputsKroneckerProd(_Inputs): - """Intermediate class used to connect user inputs to kronecker_prod operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.kronecker_prod() - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) + """Intermediate class used to connect user inputs to + kronecker_prod operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.kronecker_prod() + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) """ + def __init__(self, op: Operator): super().__init__(kronecker_prod._spec().inputs, op) - self._fieldA = Input(kronecker_prod._spec().input_pin(0), 0, op, -1) + self._fieldA = Input(kronecker_prod._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fieldA) - self._fieldB = Input(kronecker_prod._spec().input_pin(1), 1, op, -1) + self._fieldB = Input(kronecker_prod._spec().input_pin(1), 1, op, -1) self._inputs.append(self._fieldB) @property def fieldA(self): - """Allows to connect fieldA input to the operator + """Allows to connect fieldA input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldA : Field, FieldsContainer, + my_fieldA : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.kronecker_prod() >>> op.inputs.fieldA.connect(my_fieldA) - >>> #or + >>> # or >>> op.inputs.fieldA(my_fieldA) - """ return self._fieldA @property def fieldB(self): - """Allows to connect fieldB input to the operator + """Allows to connect fieldB input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldB : Field, FieldsContainer, + my_fieldB : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.kronecker_prod() >>> op.inputs.fieldB.connect(my_fieldB) - >>> #or + >>> # or >>> op.inputs.fieldB(my_fieldB) - """ return self._fieldB + class OutputsKroneckerProd(_Outputs): - """Intermediate class used to get outputs from kronecker_prod operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.kronecker_prod() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + kronecker_prod operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.kronecker_prod() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(kronecker_prod._spec().outputs, op) - self._field = Output(kronecker_prod._spec().output_pin(0), 0, op) + self._field = Output(kronecker_prod._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.kronecker_prod() >>> # Connect inputs : op.inputs. ... 
- >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/linear_combination.py b/ansys/dpf/core/operators/math/linear_combination.py index f5202da58a8..f90e11f14d4 100644 --- a/ansys/dpf/core/operators/math/linear_combination.py +++ b/ansys/dpf/core/operators/math/linear_combination.py @@ -1,84 +1,150 @@ """ linear_combination -================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class linear_combination(Operator): - """Computes aXY + bZ where a,b (in 0, in 3) are scalar and X,Y,Z (in 1,2,4) are complex numbers. 
- - available inputs: - - a (float) - - fields_containerA (FieldsContainer) - - fields_containerB (FieldsContainer) - - b (float) - - fields_containerC (FieldsContainer) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.linear_combination() - - >>> # Make input connections - >>> my_a = float() - >>> op.inputs.a.connect(my_a) - >>> my_fields_containerA = dpf.FieldsContainer() - >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> my_fields_containerB = dpf.FieldsContainer() - >>> op.inputs.fields_containerB.connect(my_fields_containerB) - >>> my_b = float() - >>> op.inputs.b.connect(my_b) - >>> my_fields_containerC = dpf.FieldsContainer() - >>> op.inputs.fields_containerC.connect(my_fields_containerC) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.linear_combination(a=my_a,fields_containerA=my_fields_containerA,fields_containerB=my_fields_containerB,b=my_b,fields_containerC=my_fields_containerC) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, a=None, fields_containerA=None, fields_containerB=None, b=None, fields_containerC=None, config=None, server=None): - super().__init__(name="CplxOp", config = config, server = server) + """Computes aXY + bZ where a,b (in 0, in 3) are scalar and X,Y,Z (in + 1,2,4) are complex numbers. 
+ + Parameters + ---------- + a : float + Double + fields_containerA : FieldsContainer + fields_containerB : FieldsContainer + b : float + Double + fields_containerC : FieldsContainer + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.linear_combination() + + >>> # Make input connections + >>> my_a = float() + >>> op.inputs.a.connect(my_a) + >>> my_fields_containerA = dpf.FieldsContainer() + >>> op.inputs.fields_containerA.connect(my_fields_containerA) + >>> my_fields_containerB = dpf.FieldsContainer() + >>> op.inputs.fields_containerB.connect(my_fields_containerB) + >>> my_b = float() + >>> op.inputs.b.connect(my_b) + >>> my_fields_containerC = dpf.FieldsContainer() + >>> op.inputs.fields_containerC.connect(my_fields_containerC) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.linear_combination( + ... a=my_a, + ... fields_containerA=my_fields_containerA, + ... fields_containerB=my_fields_containerB, + ... b=my_b, + ... fields_containerC=my_fields_containerC, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + a=None, + fields_containerA=None, + fields_containerB=None, + b=None, + fields_containerC=None, + config=None, + server=None, + ): + super().__init__(name="CplxOp", config=config, server=server) self._inputs = InputsLinearCombination(self) self._outputs = OutputsLinearCombination(self) - if a !=None: + if a is not None: self.inputs.a.connect(a) - if fields_containerA !=None: + if fields_containerA is not None: self.inputs.fields_containerA.connect(fields_containerA) - if fields_containerB !=None: + if fields_containerB is not None: self.inputs.fields_containerB.connect(fields_containerB) - if b !=None: + if b is not None: self.inputs.b.connect(b) - if fields_containerC !=None: + if fields_containerC is not None: self.inputs.fields_containerC.connect(fields_containerC) @staticmethod def _spec(): - spec = Specification(description="""Computes aXY + bZ where a,b (in 0, in 3) are scalar and X,Y,Z (in 1,2,4) are complex numbers.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "a", type_names=["double"], optional=False, document="""Double"""), - 1 : PinSpecification(name = "fields_containerA", type_names=["fields_container"], optional=False, document=""""""), - 2 : PinSpecification(name = "fields_containerB", type_names=["fields_container"], optional=False, document=""""""), - 3 : PinSpecification(name = "b", type_names=["double"], optional=False, document="""Double"""), - 4 : PinSpecification(name = "fields_containerC", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Computes aXY + bZ where a,b (in 0, in 3) are scalar and X,Y,Z (in + 1,2,4) are complex numbers.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + 
name="a", + type_names=["double"], + optional=False, + document="""Double""", + ), + 1: PinSpecification( + name="fields_containerA", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="fields_containerB", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 3: PinSpecification( + name="b", + type_names=["double"], + optional=False, + document="""Double""", + ), + 4: PinSpecification( + name="fields_containerC", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "CplxOp") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="CplxOp", server=server) @property def inputs(self): @@ -86,191 +152,185 @@ def inputs(self): Returns -------- - inputs : InputsLinearCombination + inputs : InputsLinearCombination """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsLinearCombination + outputs : OutputsLinearCombination """ return super().outputs -#internal name: CplxOp -#scripting name: linear_combination class InputsLinearCombination(_Inputs): - """Intermediate class used to connect user inputs to linear_combination operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.linear_combination() - >>> my_a = float() - >>> op.inputs.a.connect(my_a) - >>> my_fields_containerA = dpf.FieldsContainer() - >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> my_fields_containerB = dpf.FieldsContainer() - >>> op.inputs.fields_containerB.connect(my_fields_containerB) - >>> my_b = float() - >>> op.inputs.b.connect(my_b) - >>> my_fields_containerC = dpf.FieldsContainer() - >>> op.inputs.fields_containerC.connect(my_fields_containerC) + """Intermediate class used to connect user inputs to + linear_combination operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.linear_combination() + >>> my_a = float() + >>> op.inputs.a.connect(my_a) + >>> my_fields_containerA = dpf.FieldsContainer() + >>> op.inputs.fields_containerA.connect(my_fields_containerA) + >>> my_fields_containerB = dpf.FieldsContainer() + >>> op.inputs.fields_containerB.connect(my_fields_containerB) + >>> my_b = float() + >>> op.inputs.b.connect(my_b) + >>> my_fields_containerC = dpf.FieldsContainer() + >>> op.inputs.fields_containerC.connect(my_fields_containerC) """ + def __init__(self, op: Operator): super().__init__(linear_combination._spec().inputs, op) - self._a = Input(linear_combination._spec().input_pin(0), 0, op, -1) + self._a = Input(linear_combination._spec().input_pin(0), 0, op, -1) self._inputs.append(self._a) - self._fields_containerA = Input(linear_combination._spec().input_pin(1), 1, op, -1) + self._fields_containerA = Input( + linear_combination._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fields_containerA) - self._fields_containerB = Input(linear_combination._spec().input_pin(2), 2, op, -1) + self._fields_containerB = Input( + linear_combination._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_containerB) - self._b = Input(linear_combination._spec().input_pin(3), 3, op, -1) + self._b = Input(linear_combination._spec().input_pin(3), 3, op, -1) self._inputs.append(self._b) - self._fields_containerC = Input(linear_combination._spec().input_pin(4), 4, op, -1) + self._fields_containerC = Input( + linear_combination._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._fields_containerC) @property def a(self): - """Allows to connect a input to the operator + """Allows to connect a input to the operator. 
- - pindoc: Double + Double Parameters ---------- - my_a : float, + my_a : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.linear_combination() >>> op.inputs.a.connect(my_a) - >>> #or + >>> # or >>> op.inputs.a(my_a) - """ return self._a @property def fields_containerA(self): - """Allows to connect fields_containerA input to the operator + """Allows to connect fields_containerA input to the operator. Parameters ---------- - my_fields_containerA : FieldsContainer, + my_fields_containerA : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.linear_combination() >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> #or + >>> # or >>> op.inputs.fields_containerA(my_fields_containerA) - """ return self._fields_containerA @property def fields_containerB(self): - """Allows to connect fields_containerB input to the operator + """Allows to connect fields_containerB input to the operator. Parameters ---------- - my_fields_containerB : FieldsContainer, + my_fields_containerB : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.linear_combination() >>> op.inputs.fields_containerB.connect(my_fields_containerB) - >>> #or + >>> # or >>> op.inputs.fields_containerB(my_fields_containerB) - """ return self._fields_containerB @property def b(self): - """Allows to connect b input to the operator + """Allows to connect b input to the operator. - - pindoc: Double + Double Parameters ---------- - my_b : float, + my_b : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.linear_combination() >>> op.inputs.b.connect(my_b) - >>> #or + >>> # or >>> op.inputs.b(my_b) - """ return self._b @property def fields_containerC(self): - """Allows to connect fields_containerC input to the operator + """Allows to connect fields_containerC input to the operator. 
Parameters ---------- - my_fields_containerC : FieldsContainer, + my_fields_containerC : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.linear_combination() >>> op.inputs.fields_containerC.connect(my_fields_containerC) - >>> #or + >>> # or >>> op.inputs.fields_containerC(my_fields_containerC) - """ return self._fields_containerC + class OutputsLinearCombination(_Outputs): - """Intermediate class used to get outputs from linear_combination operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.linear_combination() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + linear_combination operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.linear_combination() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(linear_combination._spec().outputs, op) - self._fields_container = Output(linear_combination._spec().output_pin(0), 0, op) + self._fields_container = Output(linear_combination._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.linear_combination() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/ln.py b/ansys/dpf/core/operators/math/ln.py new file mode 100644 index 00000000000..80389db6fc2 --- /dev/null +++ b/ansys/dpf/core/operators/math/ln.py @@ -0,0 +1,188 @@ +""" +ln +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class ln(Operator): + """Computes element-wise ln(field[i]). + + Parameters + ---------- + field : Field or FieldsContainer or float + Field or fields container with only one field + is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.ln() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.ln( + ... field=my_field, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__(self, field=None, config=None, server=None): + super().__init__(name="ln", config=config, server=server) + self._inputs = InputsLn(self) + self._outputs = OutputsLn(self) + if field is not None: + self.inputs.field.connect(field) + + @staticmethod + def _spec(): + description = """Computes element-wise ln(field[i]).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=[ + "field", + "fields_container", + "double", + "vector", + ], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ln", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsLn + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsLn + """ + return super().outputs + + +class InputsLn(_Inputs): + """Intermediate class used to connect user inputs to + ln operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.ln() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + """ + + def __init__(self, op: Operator): + super().__init__(ln._spec().inputs, op) + self._field = Input(ln._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._field) + + @property + def field(self): + """Allows to connect field input to the operator. + + Field or fields container with only one field + is expected + + Parameters + ---------- + my_field : Field or FieldsContainer or float + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.ln() + >>> op.inputs.field.connect(my_field) + >>> # or + >>> op.inputs.field(my_field) + """ + return self._field + + +class OutputsLn(_Outputs): + """Intermediate class used to get outputs from + ln operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.ln() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() + """ + + def __init__(self, op: Operator): + super().__init__(ln._spec().outputs, op) + self._field = Output(ln._spec().output_pin(0), 0, op) + self._outputs.append(self._field) + + @property + def field(self): + """Allows to get field output of the operator + + Returns + ---------- + my_field : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.ln() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() + """ # noqa: E501 + return self._field diff --git a/ansys/dpf/core/operators/math/ln_fc.py b/ansys/dpf/core/operators/math/ln_fc.py new file mode 100644 index 00000000000..ef8c6f51749 --- /dev/null +++ b/ansys/dpf/core/operators/math/ln_fc.py @@ -0,0 +1,183 @@ +""" +ln_fc +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class ln_fc(Operator): + """Computes element-wise ln(field[i]). + + Parameters + ---------- + fields_container : FieldsContainer + Field or fields container with only one field + is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.ln_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.ln_fc( + ... fields_container=my_fields_container, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, fields_container=None, config=None, server=None): + super().__init__(name="ln_fc", config=config, server=server) + self._inputs = InputsLnFc(self) + self._outputs = OutputsLnFc(self) + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + + @staticmethod + def _spec(): + description = """Computes element-wise ln(field[i]).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. 
+ + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ln_fc", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsLnFc + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsLnFc + """ + return super().outputs + + +class InputsLnFc(_Inputs): + """Intermediate class used to connect user inputs to + ln_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.ln_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + """ + + def __init__(self, op: Operator): + super().__init__(ln_fc._spec().inputs, op) + self._fields_container = Input(ln_fc._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to connect fields_container input to the operator. + + Field or fields container with only one field + is expected + + Parameters + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.ln_fc() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> # or + >>> op.inputs.fields_container(my_fields_container) + """ + return self._fields_container + + +class OutputsLnFc(_Outputs): + """Intermediate class used to get outputs from + ln_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.ln_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(ln_fc._spec().outputs, op) + self._fields_container = Output(ln_fc._spec().output_pin(0), 0, op) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.ln_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/math/make_one_on_comp.py b/ansys/dpf/core/operators/math/make_one_on_comp.py new file mode 100644 index 00000000000..b1dea7e1058 --- /dev/null +++ b/ansys/dpf/core/operators/math/make_one_on_comp.py @@ -0,0 +1,213 @@ +""" +make_one_on_comp +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class make_one_on_comp(Operator): + """take the input field's scoping and create a field full of zeros, + except for the indexes from pin 1 that will hold 1.0. 
+ + Parameters + ---------- + fieldA : Field + scalar_int : int + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.make_one_on_comp() + + >>> # Make input connections + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_scalar_int = int() + >>> op.inputs.scalar_int.connect(my_scalar_int) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.make_one_on_comp( + ... fieldA=my_fieldA, + ... scalar_int=my_scalar_int, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__(self, fieldA=None, scalar_int=None, config=None, server=None): + super().__init__(name="make_one_on_comp", config=config, server=server) + self._inputs = InputsMakeOneOnComp(self) + self._outputs = OutputsMakeOneOnComp(self) + if fieldA is not None: + self.inputs.fieldA.connect(fieldA) + if scalar_int is not None: + self.inputs.scalar_int.connect(scalar_int) + + @staticmethod + def _spec(): + description = """take the input field's scoping and create a field full of zeros, + except for the indexes from pin 1 that will hold 1.0.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fieldA", + type_names=["field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="scalar_int", + type_names=["int32"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="make_one_on_comp", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsMakeOneOnComp + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsMakeOneOnComp + """ + return super().outputs + + +class InputsMakeOneOnComp(_Inputs): + """Intermediate class used to connect user inputs to + make_one_on_comp operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.make_one_on_comp() + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_scalar_int = int() + >>> op.inputs.scalar_int.connect(my_scalar_int) + """ + + def __init__(self, op: Operator): + super().__init__(make_one_on_comp._spec().inputs, op) + self._fieldA = Input(make_one_on_comp._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._fieldA) + self._scalar_int = Input(make_one_on_comp._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scalar_int) + + @property + def fieldA(self): + """Allows to connect fieldA input to the operator. + + Parameters + ---------- + my_fieldA : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.make_one_on_comp() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> # or + >>> op.inputs.fieldA(my_fieldA) + """ + return self._fieldA + + @property + def scalar_int(self): + """Allows to connect scalar_int input to the operator. 
+ + Parameters + ---------- + my_scalar_int : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.make_one_on_comp() + >>> op.inputs.scalar_int.connect(my_scalar_int) + >>> # or + >>> op.inputs.scalar_int(my_scalar_int) + """ + return self._scalar_int + + +class OutputsMakeOneOnComp(_Outputs): + """Intermediate class used to get outputs from + make_one_on_comp operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.make_one_on_comp() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() + """ + + def __init__(self, op: Operator): + super().__init__(make_one_on_comp._spec().outputs, op) + self._field = Output(make_one_on_comp._spec().output_pin(0), 0, op) + self._outputs.append(self._field) + + @property + def field(self): + """Allows to get field output of the operator + + Returns + ---------- + my_field : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.make_one_on_comp() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() + """ # noqa: E501 + return self._field diff --git a/ansys/dpf/core/operators/math/matrix_inverse.py b/ansys/dpf/core/operators/math/matrix_inverse.py index f365efded4e..89c8d250a29 100644 --- a/ansys/dpf/core/operators/math/matrix_inverse.py +++ b/ansys/dpf/core/operators/math/matrix_inverse.py @@ -1,60 +1,92 @@ """ matrix_inverse -============== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Math plugin, from "math" category -""" class matrix_inverse(Operator): """computes the complex matrix inverse at a given fields container. - available inputs: - - fields_container (FieldsContainer) + Parameters + ---------- + fields_container : FieldsContainer + Fields_container + - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.matrix_inverse() - >>> # Instantiate operator - >>> op = dpf.operators.math.matrix_inverse() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.matrix_inverse( + ... fields_container=my_fields_container, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.matrix_inverse(fields_container=my_fields_container) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="inverseOp", config = config, server = server) + super().__init__(name="inverseOp", config=config, server=server) self._inputs = InputsMatrixInverse(self) self._outputs = OutputsMatrixInverse(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""computes the complex matrix inverse at a given fields container.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""fields_container""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = ( + """computes the complex matrix inverse at a given fields container.""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Fields_container""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "inverseOp") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="inverseOp", server=server) @property def inputs(self): @@ -62,93 +94,89 @@ def inputs(self): Returns -------- - inputs : InputsMatrixInverse + inputs : InputsMatrixInverse """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMatrixInverse + outputs : OutputsMatrixInverse """ return super().outputs -#internal name: inverseOp -#scripting name: matrix_inverse class InputsMatrixInverse(_Inputs): - """Intermediate class used to connect user inputs to matrix_inverse operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.matrix_inverse() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + matrix_inverse operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.matrix_inverse() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(matrix_inverse._spec().inputs, op) - self._fields_container = Input(matrix_inverse._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(matrix_inverse._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: fields_container + Fields_container Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.matrix_inverse() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsMatrixInverse(_Outputs): - """Intermediate class used to get outputs from matrix_inverse operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.matrix_inverse() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + matrix_inverse operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.matrix_inverse() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(matrix_inverse._spec().outputs, op) - self._fields_container = Output(matrix_inverse._spec().output_pin(0), 0, op) + self._fields_container = Output(matrix_inverse._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.matrix_inverse() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/minus.py b/ansys/dpf/core/operators/math/minus.py index 030f4ade653..936c31f6436 100644 --- a/ansys/dpf/core/operators/math/minus.py +++ b/ansys/dpf/core/operators/math/minus.py @@ -1,66 +1,124 @@ """ minus -===== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class minus(Operator): - """Computes the difference of two fields. If one field's scoping has 'overall' location, then these field's values are applied on the entire other field. - - available inputs: - - fieldA (Field, FieldsContainer) - - fieldB (Field, FieldsContainer) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.minus() - - >>> # Make input connections - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.minus(fieldA=my_fieldA,fieldB=my_fieldB) + """Computes the difference of two fields. If one field's scoping has + 'overall' location, then these field's values are applied on the + entire other field.When using a constant or 'work_by_index', it's + possible to use 'inplace' to reuse one of the fields. 
+ + Parameters + ---------- + fieldA : Field or FieldsContainer or float + Field or fields container with only one field + is expected + fieldB : Field or FieldsContainer or float + Field or fields container with only one field + is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.minus() + + >>> # Make input connections + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.minus( + ... fieldA=my_fieldA, + ... fieldB=my_fieldB, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, fieldA=None, fieldB=None, config=None, server=None): - super().__init__(name="minus", config = config, server = server) + super().__init__(name="minus", config=config, server=server) self._inputs = InputsMinus(self) self._outputs = OutputsMinus(self) - if fieldA !=None: + if fieldA is not None: self.inputs.fieldA.connect(fieldA) - if fieldB !=None: + if fieldB is not None: self.inputs.fieldB.connect(fieldB) @staticmethod def _spec(): - spec = Specification(description="""Computes the difference of two fields. 
If one field's scoping has 'overall' location, then these field's values are applied on the entire other field.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fieldA", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "fieldB", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Computes the difference of two fields. If one field's scoping has + 'overall' location, then these field's values are applied + on the entire other field.When using a constant or + 'work_by_index', it's possible to use 'inplace' to reuse + one of the fields.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fieldA", + type_names=[ + "field", + "fields_container", + "double", + "vector", + ], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="fieldB", + type_names=[ + "field", + "fields_container", + "double", + "vector", + ], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "minus") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="minus", server=server) @property def inputs(self): @@ -68,119 +126,115 @@ def inputs(self): Returns -------- - inputs : InputsMinus + inputs : InputsMinus """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMinus + outputs : OutputsMinus """ return super().outputs -#internal name: minus -#scripting name: minus class InputsMinus(_Inputs): - """Intermediate class used to connect user inputs to minus operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.minus() - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) + """Intermediate class used to connect user inputs to + minus operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.minus() + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) """ + def __init__(self, op: Operator): super().__init__(minus._spec().inputs, op) - self._fieldA = Input(minus._spec().input_pin(0), 0, op, -1) + self._fieldA = Input(minus._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fieldA) - self._fieldB = Input(minus._spec().input_pin(1), 1, op, -1) + self._fieldB = Input(minus._spec().input_pin(1), 1, op, -1) self._inputs.append(self._fieldB) @property def fieldA(self): - """Allows to connect fieldA input to the operator + """Allows to connect fieldA input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldA : Field, FieldsContainer, + my_fieldA : Field or FieldsContainer or float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.minus() >>> op.inputs.fieldA.connect(my_fieldA) - >>> #or + >>> # or >>> op.inputs.fieldA(my_fieldA) - """ return self._fieldA @property def fieldB(self): - """Allows to connect fieldB input to the operator + """Allows to connect fieldB input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldB : Field, FieldsContainer, + my_fieldB : Field or FieldsContainer or float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.minus() >>> op.inputs.fieldB.connect(my_fieldB) - >>> #or + >>> # or >>> op.inputs.fieldB(my_fieldB) - """ return self._fieldB + class OutputsMinus(_Outputs): - """Intermediate class used to get outputs from minus operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.minus() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + minus operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.minus() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(minus._spec().outputs, op) - self._field = Output(minus._spec().output_pin(0), 0, op) + self._field = Output(minus._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.minus() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/minus_fc.py b/ansys/dpf/core/operators/math/minus_fc.py index adb6513a9be..6bfb792f468 100644 --- a/ansys/dpf/core/operators/math/minus_fc.py +++ b/ansys/dpf/core/operators/math/minus_fc.py @@ -1,66 +1,130 @@ """ minus_fc -======== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class minus_fc(Operator): - """Computes the difference of two fields. If one field's scoping has 'overall' location, then these field's values are applied on the entire other field. 
- - available inputs: - - field_or_fields_container_A (FieldsContainer) - - field_or_fields_container_B (FieldsContainer) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.minus_fc() - - >>> # Make input connections - >>> my_field_or_fields_container_A = dpf.FieldsContainer() - >>> op.inputs.field_or_fields_container_A.connect(my_field_or_fields_container_A) - >>> my_field_or_fields_container_B = dpf.FieldsContainer() - >>> op.inputs.field_or_fields_container_B.connect(my_field_or_fields_container_B) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.minus_fc(field_or_fields_container_A=my_field_or_fields_container_A,field_or_fields_container_B=my_field_or_fields_container_B) + """Computes the difference of two fields. If one field's scoping has + 'overall' location, then these field's values are applied on the + entire other field.When using a constant or 'work_by_index', it's + possible to use 'inplace' to reuse one of the fields. + + Parameters + ---------- + field_or_fields_container_A : Field or FieldsContainer or float + Field or fields container with only one field + is expected + field_or_fields_container_B : Field or FieldsContainer or float + Field or fields container with only one field + is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.minus_fc() + + >>> # Make input connections + >>> my_field_or_fields_container_A = dpf.Field() + >>> op.inputs.field_or_fields_container_A.connect(my_field_or_fields_container_A) + >>> my_field_or_fields_container_B = dpf.Field() + >>> op.inputs.field_or_fields_container_B.connect(my_field_or_fields_container_B) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.minus_fc( + ... 
field_or_fields_container_A=my_field_or_fields_container_A, + ... field_or_fields_container_B=my_field_or_fields_container_B, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, field_or_fields_container_A=None, field_or_fields_container_B=None, config=None, server=None): - super().__init__(name="minus_fc", config = config, server = server) + def __init__( + self, + field_or_fields_container_A=None, + field_or_fields_container_B=None, + config=None, + server=None, + ): + super().__init__(name="minus_fc", config=config, server=server) self._inputs = InputsMinusFc(self) self._outputs = OutputsMinusFc(self) - if field_or_fields_container_A !=None: + if field_or_fields_container_A is not None: self.inputs.field_or_fields_container_A.connect(field_or_fields_container_A) - if field_or_fields_container_B !=None: + if field_or_fields_container_B is not None: self.inputs.field_or_fields_container_B.connect(field_or_fields_container_B) @staticmethod def _spec(): - spec = Specification(description="""Computes the difference of two fields. If one field's scoping has 'overall' location, then these field's values are applied on the entire other field.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field_or_fields_container_A", type_names=["fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "field_or_fields_container_B", type_names=["fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Computes the difference of two fields. 
If one field's scoping has + 'overall' location, then these field's values are applied + on the entire other field.When using a constant or + 'work_by_index', it's possible to use 'inplace' to reuse + one of the fields.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field_or_fields_container_A", + type_names=[ + "field", + "fields_container", + "double", + "vector", + ], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="field_or_fields_container_B", + type_names=[ + "field", + "fields_container", + "double", + "vector", + ], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "minus_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="minus_fc", server=server) @property def inputs(self): @@ -68,119 +132,119 @@ def inputs(self): Returns -------- - inputs : InputsMinusFc + inputs : InputsMinusFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMinusFc + outputs : OutputsMinusFc """ return super().outputs -#internal name: minus_fc -#scripting name: minus_fc class InputsMinusFc(_Inputs): - """Intermediate class used to connect user inputs to minus_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.minus_fc() - >>> my_field_or_fields_container_A = dpf.FieldsContainer() - >>> op.inputs.field_or_fields_container_A.connect(my_field_or_fields_container_A) - >>> my_field_or_fields_container_B = dpf.FieldsContainer() - >>> op.inputs.field_or_fields_container_B.connect(my_field_or_fields_container_B) + """Intermediate class used to connect user inputs to + minus_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.minus_fc() + >>> my_field_or_fields_container_A = dpf.Field() + >>> op.inputs.field_or_fields_container_A.connect(my_field_or_fields_container_A) + >>> my_field_or_fields_container_B = dpf.Field() + >>> op.inputs.field_or_fields_container_B.connect(my_field_or_fields_container_B) """ + def __init__(self, op: Operator): super().__init__(minus_fc._spec().inputs, op) - self._field_or_fields_container_A = Input(minus_fc._spec().input_pin(0), 0, op, -1) + self._field_or_fields_container_A = Input( + minus_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field_or_fields_container_A) - self._field_or_fields_container_B = Input(minus_fc._spec().input_pin(1), 1, op, -1) + self._field_or_fields_container_B = Input( + minus_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._field_or_fields_container_B) @property def field_or_fields_container_A(self): - """Allows to connect field_or_fields_container_A input to the operator + """Allows to connect field_or_fields_container_A input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field_or_fields_container_A : FieldsContainer, + my_field_or_fields_container_A : Field or FieldsContainer or float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.minus_fc() >>> op.inputs.field_or_fields_container_A.connect(my_field_or_fields_container_A) - >>> #or + >>> # or >>> op.inputs.field_or_fields_container_A(my_field_or_fields_container_A) - """ return self._field_or_fields_container_A @property def field_or_fields_container_B(self): - """Allows to connect field_or_fields_container_B input to the operator + """Allows to connect field_or_fields_container_B input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field_or_fields_container_B : FieldsContainer, + my_field_or_fields_container_B : Field or FieldsContainer or float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.minus_fc() >>> op.inputs.field_or_fields_container_B.connect(my_field_or_fields_container_B) - >>> #or + >>> # or >>> op.inputs.field_or_fields_container_B(my_field_or_fields_container_B) - """ return self._field_or_fields_container_B + class OutputsMinusFc(_Outputs): - """Intermediate class used to get outputs from minus_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.minus_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + minus_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.minus_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(minus_fc._spec().outputs, op) - self._fields_container = Output(minus_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(minus_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.minus_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/modal_superposition.py b/ansys/dpf/core/operators/math/modal_superposition.py index 899370cc373..dd2291d0b22 100644 --- a/ansys/dpf/core/operators/math/modal_superposition.py +++ b/ansys/dpf/core/operators/math/modal_superposition.py @@ -1,78 +1,166 @@ """ modal_superposition -=================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "math" category -""" class modal_superposition(Operator): - """Compute the solution in the time/frequency space from a modal solution by multiplying a modal basis (in 0) by the solution in this modal space (coefficients for each mode for each time/frequency) (in 1). 
- - available inputs: - - modal_basis (FieldsContainer) - - solution_in_modal_space (FieldsContainer) - - time_scoping (Scoping, list) (optional) - - mesh_scoping (Scoping, ScopingsContainer) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.modal_superposition() - - >>> # Make input connections - >>> my_modal_basis = dpf.FieldsContainer() - >>> op.inputs.modal_basis.connect(my_modal_basis) - >>> my_solution_in_modal_space = dpf.FieldsContainer() - >>> op.inputs.solution_in_modal_space.connect(my_solution_in_modal_space) - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.modal_superposition(modal_basis=my_modal_basis,solution_in_modal_space=my_solution_in_modal_space,time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, modal_basis=None, solution_in_modal_space=None, time_scoping=None, mesh_scoping=None, config=None, server=None): - super().__init__(name="expansion::modal_superposition", config = config, server = server) + """Compute the solution in the time/frequency space from a modal solution + by multiplying a modal basis (in 0) by the solution in this modal + space (coefficients for each mode for each time/frequency) (in 1). 
+ + Parameters + ---------- + modal_basis : FieldsContainer + One field by mode with each field + representing a mode shape on nodes or + elements + solution_in_modal_space : FieldsContainer + One field by time/frequency with each field + having a ponderating coefficient for + each mode of the modal_basis pin + time_scoping : Scoping, optional + This input allows to compute the result on a + subset of the time frequency domain + defined in the + solution_in_modal_space fields + container + mesh_scoping : Scoping or ScopingsContainer, optional + This input allows to compute the result on a + subset of the space domain defined in + the modal_basis fields container + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.modal_superposition() + + >>> # Make input connections + >>> my_modal_basis = dpf.FieldsContainer() + >>> op.inputs.modal_basis.connect(my_modal_basis) + >>> my_solution_in_modal_space = dpf.FieldsContainer() + >>> op.inputs.solution_in_modal_space.connect(my_solution_in_modal_space) + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.modal_superposition( + ... modal_basis=my_modal_basis, + ... solution_in_modal_space=my_solution_in_modal_space, + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + modal_basis=None, + solution_in_modal_space=None, + time_scoping=None, + mesh_scoping=None, + config=None, + server=None, + ): + super().__init__( + name="expansion::modal_superposition", config=config, server=server + ) self._inputs = InputsModalSuperposition(self) self._outputs = OutputsModalSuperposition(self) - if modal_basis !=None: + if modal_basis is not None: self.inputs.modal_basis.connect(modal_basis) - if solution_in_modal_space !=None: + if solution_in_modal_space is not None: self.inputs.solution_in_modal_space.connect(solution_in_modal_space) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) @staticmethod def _spec(): - spec = Specification(description="""Compute the solution in the time/frequency space from a modal solution by multiplying a modal basis (in 0) by the solution in this modal space (coefficients for each mode for each time/frequency) (in 1).""", - map_input_pin_spec={ - 0 : PinSpecification(name = "modal_basis", type_names=["fields_container"], optional=False, document="""one field by mode with each field representing a mode shape on nodes or elements"""), - 1 : PinSpecification(name = "solution_in_modal_space", type_names=["fields_container"], optional=False, document="""one field by time/frequency with each field having a ponderating coefficient for each mode of the modal_basis pin"""), - 3 : PinSpecification(name = "time_scoping", type_names=["scoping","vector"], optional=True, document="""this input allows to compute the result on a subset of the time frequency domain defined in the solution_in_modal_space fields container"""), - 4 : PinSpecification(name = "mesh_scoping", type_names=["scoping","scopings_container"], optional=True, document="""this input 
allows to compute the result on a subset of the space domain defined in the modal_basis fields container""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Compute the solution in the time/frequency space from a modal solution + by multiplying a modal basis (in 0) by the solution in + this modal space (coefficients for each mode for each + time/frequency) (in 1).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="modal_basis", + type_names=["fields_container"], + optional=False, + document="""One field by mode with each field + representing a mode shape on nodes or + elements""", + ), + 1: PinSpecification( + name="solution_in_modal_space", + type_names=["fields_container"], + optional=False, + document="""One field by time/frequency with each field + having a ponderating coefficient for + each mode of the modal_basis pin""", + ), + 3: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector"], + optional=True, + document="""This input allows to compute the result on a + subset of the time frequency domain + defined in the + solution_in_modal_space fields + container""", + ), + 4: PinSpecification( + name="mesh_scoping", + type_names=["scoping", "scopings_container"], + optional=True, + document="""This input allows to compute the result on a + subset of the space domain defined in + the modal_basis fields container""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "expansion::modal_superposition") + def default_config(server=None): + """Returns the default config of the operator. 
+ + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config( + name="expansion::modal_superposition", server=server + ) @property def inputs(self): @@ -80,171 +168,175 @@ def inputs(self): Returns -------- - inputs : InputsModalSuperposition + inputs : InputsModalSuperposition """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsModalSuperposition + outputs : OutputsModalSuperposition """ return super().outputs -#internal name: expansion::modal_superposition -#scripting name: modal_superposition class InputsModalSuperposition(_Inputs): - """Intermediate class used to connect user inputs to modal_superposition operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.modal_superposition() - >>> my_modal_basis = dpf.FieldsContainer() - >>> op.inputs.modal_basis.connect(my_modal_basis) - >>> my_solution_in_modal_space = dpf.FieldsContainer() - >>> op.inputs.solution_in_modal_space.connect(my_solution_in_modal_space) - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + """Intermediate class used to connect user inputs to + modal_superposition operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.modal_superposition() + >>> my_modal_basis = dpf.FieldsContainer() + >>> op.inputs.modal_basis.connect(my_modal_basis) + >>> my_solution_in_modal_space = dpf.FieldsContainer() + >>> op.inputs.solution_in_modal_space.connect(my_solution_in_modal_space) + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) """ + def __init__(self, op: Operator): super().__init__(modal_superposition._spec().inputs, op) - self._modal_basis = Input(modal_superposition._spec().input_pin(0), 0, op, -1) + self._modal_basis = Input(modal_superposition._spec().input_pin(0), 0, op, -1) self._inputs.append(self._modal_basis) - self._solution_in_modal_space = Input(modal_superposition._spec().input_pin(1), 1, op, -1) + self._solution_in_modal_space = Input( + modal_superposition._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._solution_in_modal_space) - self._time_scoping = Input(modal_superposition._spec().input_pin(3), 3, op, -1) + self._time_scoping = Input(modal_superposition._spec().input_pin(3), 3, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(modal_superposition._spec().input_pin(4), 4, op, -1) + self._mesh_scoping = Input(modal_superposition._spec().input_pin(4), 4, op, -1) self._inputs.append(self._mesh_scoping) @property def modal_basis(self): - """Allows to connect modal_basis input to the operator + """Allows to connect modal_basis input to the operator. 
- - pindoc: one field by mode with each field representing a mode shape on nodes or elements + One field by mode with each field + representing a mode shape on nodes or + elements Parameters ---------- - my_modal_basis : FieldsContainer, + my_modal_basis : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.modal_superposition() >>> op.inputs.modal_basis.connect(my_modal_basis) - >>> #or + >>> # or >>> op.inputs.modal_basis(my_modal_basis) - """ return self._modal_basis @property def solution_in_modal_space(self): - """Allows to connect solution_in_modal_space input to the operator + """Allows to connect solution_in_modal_space input to the operator. - - pindoc: one field by time/frequency with each field having a ponderating coefficient for each mode of the modal_basis pin + One field by time/frequency with each field + having a ponderating coefficient for + each mode of the modal_basis pin Parameters ---------- - my_solution_in_modal_space : FieldsContainer, + my_solution_in_modal_space : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.modal_superposition() >>> op.inputs.solution_in_modal_space.connect(my_solution_in_modal_space) - >>> #or + >>> # or >>> op.inputs.solution_in_modal_space(my_solution_in_modal_space) - """ return self._solution_in_modal_space @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: this input allows to compute the result on a subset of the time frequency domain defined in the solution_in_modal_space fields container + This input allows to compute the result on a + subset of the time frequency domain + defined in the + solution_in_modal_space fields + container Parameters ---------- - my_time_scoping : Scoping, list, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.modal_superposition() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: this input allows to compute the result on a subset of the space domain defined in the modal_basis fields container + This input allows to compute the result on a + subset of the space domain defined in + the modal_basis fields container Parameters ---------- - my_mesh_scoping : Scoping, ScopingsContainer, + my_mesh_scoping : Scoping or ScopingsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.modal_superposition() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping + class OutputsModalSuperposition(_Outputs): - """Intermediate class used to get outputs from modal_superposition operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.modal_superposition() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + modal_superposition operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.modal_superposition() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(modal_superposition._spec().outputs, op) - self._fields_container = Output(modal_superposition._spec().output_pin(0), 0, op) + self._fields_container = Output( + modal_superposition._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.modal_superposition() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/modulus.py b/ansys/dpf/core/operators/math/modulus.py index 96df9ea038f..6bdc6dba6f0 100644 --- a/ansys/dpf/core/operators/math/modulus.py +++ b/ansys/dpf/core/operators/math/modulus.py @@ -1,60 +1,91 @@ """ modulus -======= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class modulus(Operator): - """Computes element-wise modulus of field containers containing complex fields. + """Computes element-wise modulus of field containers containing complex + fields. 
+ + Parameters + ---------- + fields_container : FieldsContainer - available inputs: - - fields_container (FieldsContainer) - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.modulus() - >>> # Instantiate operator - >>> op = dpf.operators.math.modulus() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.modulus( + ... fields_container=my_fields_container, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.modulus(fields_container=my_fields_container) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="modulus", config = config, server = server) + super().__init__(name="modulus", config=config, server=server) self._inputs = InputsModulus(self) self._outputs = OutputsModulus(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Computes element-wise modulus of field containers containing complex fields.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], 
optional=False, document="""""")}) + description = """Computes element-wise modulus of field containers containing complex + fields.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "modulus") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="modulus", server=server) @property def inputs(self): @@ -62,91 +93,87 @@ def inputs(self): Returns -------- - inputs : InputsModulus + inputs : InputsModulus """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsModulus + outputs : OutputsModulus """ return super().outputs -#internal name: modulus -#scripting name: modulus class InputsModulus(_Inputs): - """Intermediate class used to connect user inputs to modulus operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.modulus() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + modulus operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.modulus() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(modulus._spec().inputs, op) - self._fields_container = Input(modulus._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(modulus._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.modulus() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsModulus(_Outputs): - """Intermediate class used to get outputs from modulus operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.modulus() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + modulus operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.modulus() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(modulus._spec().outputs, op) - self._fields_container = Output(modulus._spec().output_pin(0), 0, op) + self._fields_container = Output(modulus._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.modulus() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/norm.py b/ansys/dpf/core/operators/math/norm.py index 67db56908ca..b77b466af00 100644 --- a/ansys/dpf/core/operators/math/norm.py +++ b/ansys/dpf/core/operators/math/norm.py @@ -1,60 +1,94 @@ """ norm -==== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class norm(Operator): """Computes the element-wise L2 norm of the field elementary data. 
- available inputs: - - field (Field, FieldsContainer) + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + - available outputs: - - field (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.norm() - >>> # Instantiate operator - >>> op = dpf.operators.math.norm() + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.norm( + ... field=my_field, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.norm(field=my_field) + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, field=None, config=None, server=None): - super().__init__(name="norm", config = config, server = server) + super().__init__(name="norm", config=config, server=server) self._inputs = InputsNorm(self) self._outputs = OutputsNorm(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) @staticmethod def _spec(): - spec = Specification(description="""Computes the element-wise L2 norm of the field elementary data.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = ( + """Computes the element-wise L2 norm of the field elementary data.""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: 
PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "norm") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="norm", server=server) @property def inputs(self): @@ -62,93 +96,90 @@ def inputs(self): Returns -------- - inputs : InputsNorm + inputs : InputsNorm """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNorm + outputs : OutputsNorm """ return super().outputs -#internal name: norm -#scripting name: norm class InputsNorm(_Inputs): - """Intermediate class used to connect user inputs to norm operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.norm() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + """Intermediate class used to connect user inputs to + norm operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.norm() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) """ + def __init__(self, op: Operator): super().__init__(norm._spec().inputs, op) - self._field = Input(norm._spec().input_pin(0), 0, op, -1) + self._field = Input(norm._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.norm() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field + class OutputsNorm(_Outputs): - """Intermediate class used to get outputs from norm operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.norm() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + norm operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.norm() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(norm._spec().outputs, op) - self._field = Output(norm._spec().output_pin(0), 0, op) + self._field = Output(norm._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.norm() >>> # Connect inputs : op.inputs. ... 
- >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/norm_fc.py b/ansys/dpf/core/operators/math/norm_fc.py index 4b3261b151b..2a4ec1dfc62 100644 --- a/ansys/dpf/core/operators/math/norm_fc.py +++ b/ansys/dpf/core/operators/math/norm_fc.py @@ -1,60 +1,92 @@ """ norm_fc -======= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class norm_fc(Operator): - """Computes the element-wise L2 norm of the field elementary data. This process is applied on eah field of the input fields container. + """Computes the element-wise L2 norm of the field elementary data. This + process is applied on eah field of the input fields container. 
+ + Parameters + ---------- + fields_container : FieldsContainer - available inputs: - - fields_container (FieldsContainer) - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.norm_fc() - >>> # Instantiate operator - >>> op = dpf.operators.math.norm_fc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.norm_fc( + ... fields_container=my_fields_container, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.norm_fc(fields_container=my_fields_container) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="norm_fc", config = config, server = server) + super().__init__(name="norm_fc", config=config, server=server) self._inputs = InputsNormFc(self) self._outputs = OutputsNormFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Computes the element-wise L2 norm of the field elementary data. 
This process is applied on eah field of the input fields container.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Computes the element-wise L2 norm of the field elementary data. This + process is applied on eah field of the input fields + container.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "norm_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="norm_fc", server=server) @property def inputs(self): @@ -62,91 +94,87 @@ def inputs(self): Returns -------- - inputs : InputsNormFc + inputs : InputsNormFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNormFc + outputs : OutputsNormFc """ return super().outputs -#internal name: norm_fc -#scripting name: norm_fc class InputsNormFc(_Inputs): - """Intermediate class used to connect user inputs to norm_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.norm_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + norm_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.norm_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(norm_fc._spec().inputs, op) - self._fields_container = Input(norm_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(norm_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.norm_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsNormFc(_Outputs): - """Intermediate class used to get outputs from norm_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.norm_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + norm_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.norm_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(norm_fc._spec().outputs, op) - self._fields_container = Output(norm_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(norm_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.norm_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/overall_dot.py b/ansys/dpf/core/operators/math/overall_dot.py index 208fde1ccd0..56367fcf116 100644 --- a/ansys/dpf/core/operators/math/overall_dot.py +++ b/ansys/dpf/core/operators/math/overall_dot.py @@ -1,66 +1,104 @@ """ overall_dot -=========== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class overall_dot(Operator): - """Compute a sdot product between two fields and return a scalar. + """Compute a dot product between two fields (fields are seen like a + single large vector) and return a scalar. 
+ + Parameters + ---------- + fieldA : Field + fieldB : Field - available inputs: - - FieldA (Field) - - FieldB (Field) - available outputs: - - field (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.overall_dot() - >>> # Instantiate operator - >>> op = dpf.operators.math.overall_dot() + >>> # Make input connections + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) - >>> # Make input connections - >>> my_FieldA = dpf.Field() - >>> op.inputs.FieldA.connect(my_FieldA) - >>> my_FieldB = dpf.Field() - >>> op.inputs.FieldB.connect(my_FieldB) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.overall_dot( + ... fieldA=my_fieldA, + ... fieldB=my_fieldB, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.overall_dot(FieldA=my_FieldA,FieldB=my_FieldB) + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" - def __init__(self, FieldA=None, FieldB=None, config=None, server=None): - super().__init__(name="native::overall_dot", config = config, server = server) + def __init__(self, fieldA=None, fieldB=None, config=None, server=None): + super().__init__(name="native::overall_dot", config=config, server=server) self._inputs = InputsOverallDot(self) self._outputs = OutputsOverallDot(self) - if FieldA !=None: - self.inputs.FieldA.connect(FieldA) - if FieldB !=None: - self.inputs.FieldB.connect(FieldB) + if fieldA is not None: + self.inputs.fieldA.connect(fieldA) + if fieldB is not None: + self.inputs.fieldB.connect(fieldB) @staticmethod def _spec(): - spec = Specification(description="""Compute a sdot product between two fields and return a scalar.""", - map_input_pin_spec={ - 0 : 
PinSpecification(name = "FieldA", type_names=["field"], optional=False, document=""""""), - 1 : PinSpecification(name = "FieldB", type_names=["field"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""Field defined on over-all location, contains a unique scalar value""")}) + description = """Compute a dot product between two fields (fields are seen like a + single large vector) and return a scalar.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fieldA", + type_names=["field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="fieldB", + type_names=["field"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""Field defined on over-all location, contains + a unique scalar value""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "native::overall_dot") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="native::overall_dot", server=server) @property def inputs(self): @@ -68,117 +106,109 @@ def inputs(self): Returns -------- - inputs : InputsOverallDot + inputs : InputsOverallDot """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsOverallDot + outputs : OutputsOverallDot """ return super().outputs -#internal name: native::overall_dot -#scripting name: overall_dot class InputsOverallDot(_Inputs): - """Intermediate class used to connect user inputs to overall_dot operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.overall_dot() - >>> my_FieldA = dpf.Field() - >>> op.inputs.FieldA.connect(my_FieldA) - >>> my_FieldB = dpf.Field() - >>> op.inputs.FieldB.connect(my_FieldB) + """Intermediate class used to connect user inputs to + overall_dot operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.overall_dot() + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) """ + def __init__(self, op: Operator): super().__init__(overall_dot._spec().inputs, op) - self._FieldA = Input(overall_dot._spec().input_pin(0), 0, op, -1) - self._inputs.append(self._FieldA) - self._FieldB = Input(overall_dot._spec().input_pin(1), 1, op, -1) - self._inputs.append(self._FieldB) + self._fieldA = Input(overall_dot._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._fieldA) + self._fieldB = Input(overall_dot._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._fieldB) @property - def FieldA(self): - """Allows to connect FieldA input to the operator + def fieldA(self): + """Allows to connect fieldA input to the operator. 
Parameters ---------- - my_FieldA : Field, + my_fieldA : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.overall_dot() - >>> op.inputs.FieldA.connect(my_FieldA) - >>> #or - >>> op.inputs.FieldA(my_FieldA) - + >>> op.inputs.fieldA.connect(my_fieldA) + >>> # or + >>> op.inputs.fieldA(my_fieldA) """ - return self._FieldA + return self._fieldA @property - def FieldB(self): - """Allows to connect FieldB input to the operator + def fieldB(self): + """Allows to connect fieldB input to the operator. Parameters ---------- - my_FieldB : Field, + my_fieldB : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.overall_dot() - >>> op.inputs.FieldB.connect(my_FieldB) - >>> #or - >>> op.inputs.FieldB(my_FieldB) - + >>> op.inputs.fieldB.connect(my_fieldB) + >>> # or + >>> op.inputs.fieldB(my_fieldB) """ - return self._FieldB + return self._fieldB + class OutputsOverallDot(_Outputs): - """Intermediate class used to get outputs from overall_dot operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.overall_dot() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + overall_dot operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.overall_dot() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(overall_dot._spec().outputs, op) - self._field = Output(overall_dot._spec().output_pin(0), 0, op) + self._field = Output(overall_dot._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - - - pindoc: Field defined on over-all location, contains a unique scalar value - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.overall_dot() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/phase.py b/ansys/dpf/core/operators/math/phase.py index 17b9ba99f56..f5bb15c29aa 100644 --- a/ansys/dpf/core/operators/math/phase.py +++ b/ansys/dpf/core/operators/math/phase.py @@ -1,66 +1,109 @@ """ phase -===== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class phase(Operator): """Computes the phase (in rad) between a real and an imaginary field. 
- available inputs: - - fieldA (Field, FieldsContainer) - - fieldB (Field, FieldsContainer) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.phase() - - >>> # Make input connections - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.phase(fieldA=my_fieldA,fieldB=my_fieldB) + Parameters + ---------- + fieldA : Field or FieldsContainer + Field or fields container with only one field + is expected + fieldB : Field or FieldsContainer + Field or fields container with only one field + is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.phase() + + >>> # Make input connections + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.phase( + ... fieldA=my_fieldA, + ... fieldB=my_fieldB, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, fieldA=None, fieldB=None, config=None, server=None): - super().__init__(name="phase", config = config, server = server) + super().__init__(name="phase", config=config, server=server) self._inputs = InputsPhase(self) self._outputs = OutputsPhase(self) - if fieldA !=None: + if fieldA is not None: self.inputs.fieldA.connect(fieldA) - if fieldB !=None: + if fieldB is not None: self.inputs.fieldB.connect(fieldB) @staticmethod def _spec(): - spec = Specification(description="""Computes the phase (in rad) between a real and an imaginary field.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fieldA", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "fieldB", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = ( + """Computes the phase (in rad) between a real and an imaginary field.""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fieldA", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="fieldB", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "phase") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="phase", server=server) @property def inputs(self): @@ -68,119 +111,115 @@ def inputs(self): Returns -------- - inputs : InputsPhase + inputs : InputsPhase """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPhase + outputs : OutputsPhase """ return super().outputs -#internal name: phase -#scripting name: phase class InputsPhase(_Inputs): - """Intermediate class used to connect user inputs to phase operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.phase() - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) + """Intermediate class used to connect user inputs to + phase operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.phase() + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) """ + def __init__(self, op: Operator): super().__init__(phase._spec().inputs, op) - self._fieldA = Input(phase._spec().input_pin(0), 0, op, -1) + self._fieldA = Input(phase._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fieldA) - self._fieldB = Input(phase._spec().input_pin(1), 1, op, -1) + self._fieldB = Input(phase._spec().input_pin(1), 1, op, -1) self._inputs.append(self._fieldB) @property def fieldA(self): - """Allows to connect fieldA input to the operator + """Allows to connect fieldA input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldA : Field, FieldsContainer, + my_fieldA : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.phase() >>> op.inputs.fieldA.connect(my_fieldA) - >>> #or + >>> # or >>> op.inputs.fieldA(my_fieldA) - """ return self._fieldA @property def fieldB(self): - """Allows to connect fieldB input to the operator + """Allows to connect fieldB input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldB : Field, FieldsContainer, + my_fieldB : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.phase() >>> op.inputs.fieldB.connect(my_fieldB) - >>> #or + >>> # or >>> op.inputs.fieldB(my_fieldB) - """ return self._fieldB + class OutputsPhase(_Outputs): - """Intermediate class used to get outputs from phase operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.phase() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + phase operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.phase() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(phase._spec().outputs, op) - self._field = Output(phase._spec().output_pin(0), 0, op) + self._field = Output(phase._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.phase() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/phase_fc.py b/ansys/dpf/core/operators/math/phase_fc.py index d9724857bbb..fefe1d60a27 100644 --- a/ansys/dpf/core/operators/math/phase_fc.py +++ b/ansys/dpf/core/operators/math/phase_fc.py @@ -1,60 +1,89 @@ """ phase_fc -======== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class phase_fc(Operator): """Computes phase (in rad) between real and imaginary fields. - available inputs: - - fields_container (FieldsContainer) + Parameters + ---------- + fields_container : FieldsContainer + - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.phase_fc() - >>> # Instantiate operator - >>> op = dpf.operators.math.phase_fc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.phase_fc( + ... fields_container=my_fields_container, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.phase_fc(fields_container=my_fields_container) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="phase_fc", config = config, server = server) + super().__init__(name="phase_fc", config=config, server=server) self._inputs = InputsPhaseFc(self) self._outputs = OutputsPhaseFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Computes phase (in rad) between real and imaginary fields.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Computes phase (in rad) between real and imaginary fields.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "phase_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="phase_fc", server=server) @property def inputs(self): @@ -62,91 +91,87 @@ def inputs(self): Returns -------- - inputs : InputsPhaseFc + inputs : InputsPhaseFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPhaseFc + outputs : OutputsPhaseFc """ return super().outputs -#internal name: phase_fc -#scripting name: phase_fc class InputsPhaseFc(_Inputs): - """Intermediate class used to connect user inputs to phase_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.phase_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + phase_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.phase_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(phase_fc._spec().inputs, op) - self._fields_container = Input(phase_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(phase_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.phase_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsPhaseFc(_Outputs): - """Intermediate class used to get outputs from phase_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.phase_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + phase_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.phase_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(phase_fc._spec().outputs, op) - self._fields_container = Output(phase_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(phase_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.phase_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/polar_to_cplx.py b/ansys/dpf/core/operators/math/polar_to_cplx.py index 604b6766a7b..a4a374fee1d 100644 --- a/ansys/dpf/core/operators/math/polar_to_cplx.py +++ b/ansys/dpf/core/operators/math/polar_to_cplx.py @@ -1,60 +1,89 @@ """ polar_to_cplx -============= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class polar_to_cplx(Operator): """Convert complex number from polar form to complex. - available inputs: - - fields_container (FieldsContainer) + Parameters + ---------- + fields_container : FieldsContainer + - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.polar_to_cplx() - >>> # Instantiate operator - >>> op = dpf.operators.math.polar_to_cplx() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.polar_to_cplx( + ... fields_container=my_fields_container, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.polar_to_cplx(fields_container=my_fields_container) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="polar_to_cplx", config = config, server = server) + super().__init__(name="polar_to_cplx", config=config, server=server) self._inputs = InputsPolarToCplx(self) self._outputs = OutputsPolarToCplx(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Convert complex number from polar form to complex.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Convert complex number from polar form to complex.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "polar_to_cplx") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="polar_to_cplx", server=server) @property def inputs(self): @@ -62,91 +91,87 @@ def inputs(self): Returns -------- - inputs : InputsPolarToCplx + inputs : InputsPolarToCplx """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPolarToCplx + outputs : OutputsPolarToCplx """ return super().outputs -#internal name: polar_to_cplx -#scripting name: polar_to_cplx class InputsPolarToCplx(_Inputs): - """Intermediate class used to connect user inputs to polar_to_cplx operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.polar_to_cplx() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + polar_to_cplx operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.polar_to_cplx() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(polar_to_cplx._spec().inputs, op) - self._fields_container = Input(polar_to_cplx._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(polar_to_cplx._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.polar_to_cplx() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsPolarToCplx(_Outputs): - """Intermediate class used to get outputs from polar_to_cplx operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.polar_to_cplx() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + polar_to_cplx operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.polar_to_cplx() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(polar_to_cplx._spec().outputs, op) - self._fields_container = Output(polar_to_cplx._spec().output_pin(0), 0, op) + self._fields_container = Output(polar_to_cplx._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.polar_to_cplx() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/pow.py b/ansys/dpf/core/operators/math/pow.py index 52cfea572bd..8dbdc147f67 100644 --- a/ansys/dpf/core/operators/math/pow.py +++ b/ansys/dpf/core/operators/math/pow.py @@ -1,66 +1,101 @@ """ pow -=== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class pow(Operator): """Computes element-wise field[i]^p. - available inputs: - - field (Field) - - factor (float) + Parameters + ---------- + field : Field + factor : float + - available outputs: - - field (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.pow() - >>> # Instantiate operator - >>> op = dpf.operators.math.pow() + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_factor = float() + >>> op.inputs.factor.connect(my_factor) - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_factor = float() - >>> op.inputs.factor.connect(my_factor) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.pow( + ... field=my_field, + ... factor=my_factor, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.pow(field=my_field,factor=my_factor) + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, field=None, factor=None, config=None, server=None): - super().__init__(name="Pow", config = config, server = server) + super().__init__(name="Pow", config=config, server=server) self._inputs = InputsPow(self) self._outputs = OutputsPow(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if factor !=None: + if factor is not None: self.inputs.factor.connect(factor) @staticmethod def _spec(): - spec = Specification(description="""Computes element-wise field[i]^p.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document=""""""), - 1 : PinSpecification(name = "factor", type_names=["double"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Computes element-wise field[i]^p.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="factor", + type_names=["double"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "Pow") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="Pow", server=server) @property def inputs(self): @@ -68,115 +103,109 @@ def inputs(self): Returns -------- - inputs : InputsPow + inputs : InputsPow """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPow + outputs : OutputsPow """ return super().outputs -#internal name: Pow -#scripting name: pow class InputsPow(_Inputs): - """Intermediate class used to connect user inputs to pow operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.pow() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_factor = float() - >>> op.inputs.factor.connect(my_factor) + """Intermediate class used to connect user inputs to + pow operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.pow() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_factor = float() + >>> op.inputs.factor.connect(my_factor) """ + def __init__(self, op: Operator): super().__init__(pow._spec().inputs, op) - self._field = Input(pow._spec().input_pin(0), 0, op, -1) + self._field = Input(pow._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._factor = Input(pow._spec().input_pin(1), 1, op, -1) + self._factor = Input(pow._spec().input_pin(1), 1, op, -1) self._inputs.append(self._factor) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. 
Parameters ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.pow() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def factor(self): - """Allows to connect factor input to the operator + """Allows to connect factor input to the operator. Parameters ---------- - my_factor : float, + my_factor : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.pow() >>> op.inputs.factor.connect(my_factor) - >>> #or + >>> # or >>> op.inputs.factor(my_factor) - """ return self._factor + class OutputsPow(_Outputs): - """Intermediate class used to get outputs from pow operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.pow() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + pow operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.pow() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(pow._spec().outputs, op) - self._field = Output(pow._spec().output_pin(0), 0, op) + self._field = Output(pow._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.pow() >>> # Connect inputs : op.inputs. ... 
- >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/pow_fc.py b/ansys/dpf/core/operators/math/pow_fc.py index a540447bec1..1c64302628a 100644 --- a/ansys/dpf/core/operators/math/pow_fc.py +++ b/ansys/dpf/core/operators/math/pow_fc.py @@ -1,66 +1,101 @@ """ pow_fc -====== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class pow_fc(Operator): """Computes element-wise field[i]^p. - available inputs: - - fields_container (FieldsContainer) - - factor (float) + Parameters + ---------- + fields_container : FieldsContainer + factor : float + - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.pow_fc() - >>> # Instantiate operator - >>> op = dpf.operators.math.pow_fc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_factor = float() + >>> op.inputs.factor.connect(my_factor) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_factor = float() - >>> op.inputs.factor.connect(my_factor) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.pow_fc( + ... fields_container=my_fields_container, + ... factor=my_factor, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.pow_fc(fields_container=my_fields_container,factor=my_factor) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, factor=None, config=None, server=None): - super().__init__(name="Pow_fc", config = config, server = server) + super().__init__(name="Pow_fc", config=config, server=server) self._inputs = InputsPowFc(self) self._outputs = OutputsPowFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if factor !=None: + if factor is not None: self.inputs.factor.connect(factor) @staticmethod def _spec(): - spec = Specification(description="""Computes element-wise field[i]^p.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "factor", type_names=["double"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Computes element-wise field[i]^p.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="factor", + type_names=["double"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "Pow_fc") + def default_config(server=None): + """Returns the default config 
of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="Pow_fc", server=server) @property def inputs(self): @@ -68,115 +103,109 @@ def inputs(self): Returns -------- - inputs : InputsPowFc + inputs : InputsPowFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPowFc + outputs : OutputsPowFc """ return super().outputs -#internal name: Pow_fc -#scripting name: pow_fc class InputsPowFc(_Inputs): - """Intermediate class used to connect user inputs to pow_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.pow_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_factor = float() - >>> op.inputs.factor.connect(my_factor) + """Intermediate class used to connect user inputs to + pow_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.pow_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_factor = float() + >>> op.inputs.factor.connect(my_factor) """ + def __init__(self, op: Operator): super().__init__(pow_fc._spec().inputs, op) - self._fields_container = Input(pow_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(pow_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) - self._factor = Input(pow_fc._spec().input_pin(1), 1, op, -1) + self._factor = Input(pow_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._factor) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.pow_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def factor(self): - """Allows to connect factor input to the operator + """Allows to connect factor input to the operator. Parameters ---------- - my_factor : float, + my_factor : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.pow_fc() >>> op.inputs.factor.connect(my_factor) - >>> #or + >>> # or >>> op.inputs.factor(my_factor) - """ return self._factor + class OutputsPowFc(_Outputs): - """Intermediate class used to get outputs from pow_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.pow_fc() - >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + pow_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.pow_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(pow_fc._spec().outputs, op) - self._fields_container = Output(pow_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(pow_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.pow_fc() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/qr_solve.py b/ansys/dpf/core/operators/math/qr_solve.py index 00161f81be0..895838a6415 100644 --- a/ansys/dpf/core/operators/math/qr_solve.py +++ b/ansys/dpf/core/operators/math/qr_solve.py @@ -1,66 +1,103 @@ """ qr_solve -======== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Math plugin, from "math" category -""" class qr_solve(Operator): """computes the solution using QR factorization. 
- available inputs: - - fields_container (FieldsContainer) - - rhs (FieldsContainer) + Parameters + ---------- + fields_container : FieldsContainer + Fields_container + rhs : FieldsContainer + Fields_container + - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.qr_solve() - >>> # Instantiate operator - >>> op = dpf.operators.math.qr_solve() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_rhs = dpf.FieldsContainer() + >>> op.inputs.rhs.connect(my_rhs) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_rhs = dpf.FieldsContainer() - >>> op.inputs.rhs.connect(my_rhs) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.qr_solve( + ... fields_container=my_fields_container, + ... rhs=my_rhs, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.qr_solve(fields_container=my_fields_container,rhs=my_rhs) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, rhs=None, config=None, server=None): - super().__init__(name="qrsolveOp", config = config, server = server) + super().__init__(name="qrsolveOp", config=config, server=server) self._inputs = InputsQrSolve(self) self._outputs = OutputsQrSolve(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if rhs !=None: + if rhs is not None: self.inputs.rhs.connect(rhs) @staticmethod def _spec(): - spec = Specification(description="""computes the solution using QR factorization.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""fields_container"""), - 1 : PinSpecification(name = "rhs", type_names=["fields_container"], optional=False, document="""fields_container""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """computes the solution using QR factorization.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Fields_container""", + ), + 1: PinSpecification( + name="rhs", + type_names=["fields_container"], + optional=False, + document="""Fields_container""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return 
Operator.default_config(name = "qrsolveOp") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="qrsolveOp", server=server) @property def inputs(self): @@ -68,119 +105,113 @@ def inputs(self): Returns -------- - inputs : InputsQrSolve + inputs : InputsQrSolve """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsQrSolve + outputs : OutputsQrSolve """ return super().outputs -#internal name: qrsolveOp -#scripting name: qr_solve class InputsQrSolve(_Inputs): - """Intermediate class used to connect user inputs to qr_solve operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.qr_solve() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_rhs = dpf.FieldsContainer() - >>> op.inputs.rhs.connect(my_rhs) + """Intermediate class used to connect user inputs to + qr_solve operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.qr_solve() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_rhs = dpf.FieldsContainer() + >>> op.inputs.rhs.connect(my_rhs) """ + def __init__(self, op: Operator): super().__init__(qr_solve._spec().inputs, op) - self._fields_container = Input(qr_solve._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(qr_solve._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) - self._rhs = Input(qr_solve._spec().input_pin(1), 1, op, -1) + self._rhs = Input(qr_solve._spec().input_pin(1), 1, op, -1) self._inputs.append(self._rhs) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: fields_container + Fields_container Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.qr_solve() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def rhs(self): - """Allows to connect rhs input to the operator + """Allows to connect rhs input to the operator. - - pindoc: fields_container + Fields_container Parameters ---------- - my_rhs : FieldsContainer, + my_rhs : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.qr_solve() >>> op.inputs.rhs.connect(my_rhs) - >>> #or + >>> # or >>> op.inputs.rhs(my_rhs) - """ return self._rhs + class OutputsQrSolve(_Outputs): - """Intermediate class used to get outputs from qr_solve operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.qr_solve() - >>> # Connect inputs : op.inputs. 
... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + qr_solve operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.qr_solve() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(qr_solve._spec().outputs, op) - self._fields_container = Output(qr_solve._spec().output_pin(0), 0, op) + self._fields_container = Output(qr_solve._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.qr_solve() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/real_part.py b/ansys/dpf/core/operators/math/real_part.py index 2f061f1666e..3147c110b3c 100644 --- a/ansys/dpf/core/operators/math/real_part.py +++ b/ansys/dpf/core/operators/math/real_part.py @@ -1,60 +1,91 @@ """ real_part -========= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class real_part(Operator): - """Extracts element-wise real part of field containers containing complex fields. 
+ """Extracts element-wise real part of field containers containing complex + fields. + + Parameters + ---------- + fields_container : FieldsContainer - available inputs: - - fields_container (FieldsContainer) - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.real_part() - >>> # Instantiate operator - >>> op = dpf.operators.math.real_part() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.real_part( + ... fields_container=my_fields_container, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.real_part(fields_container=my_fields_container) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="realP_part", config = config, server = server) + super().__init__(name="realP_part", config=config, server=server) self._inputs = InputsRealPart(self) self._outputs = OutputsRealPart(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Extracts element-wise real part of field containers containing complex fields.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}, - 
map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Extracts element-wise real part of field containers containing complex + fields.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "realP_part") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="realP_part", server=server) @property def inputs(self): @@ -62,91 +93,87 @@ def inputs(self): Returns -------- - inputs : InputsRealPart + inputs : InputsRealPart """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsRealPart + outputs : OutputsRealPart """ return super().outputs -#internal name: realP_part -#scripting name: real_part class InputsRealPart(_Inputs): - """Intermediate class used to connect user inputs to real_part operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.real_part() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + real_part operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.real_part() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(real_part._spec().inputs, op) - self._fields_container = Input(real_part._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(real_part._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.real_part() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsRealPart(_Outputs): - """Intermediate class used to get outputs from real_part operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.real_part() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + real_part operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.real_part() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(real_part._spec().outputs, op) - self._fields_container = Output(real_part._spec().output_pin(0), 0, op) + self._fields_container = Output(real_part._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.real_part() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/scale.py b/ansys/dpf/core/operators/math/scale.py index 9cd032894b3..caac3a9f68c 100644 --- a/ansys/dpf/core/operators/math/scale.py +++ b/ansys/dpf/core/operators/math/scale.py @@ -1,72 +1,122 @@ """ scale -===== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class scale(Operator): """Scales a field by a constant factor. - available inputs: - - field (Field, FieldsContainer) - - ponderation (float, Field) - - boolean (bool) (optional) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.scale() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_ponderation = float() - >>> op.inputs.ponderation.connect(my_ponderation) - >>> my_boolean = bool() - >>> op.inputs.boolean.connect(my_boolean) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.scale(field=my_field,ponderation=my_ponderation,boolean=my_boolean) + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + ponderation : float or Field + Double/field scoped on overall + boolean : bool, optional + Bool(optional, default false) if set to true, + output of scale is mane dimensionless + + + Examples + 
-------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.scale() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_ponderation = float() + >>> op.inputs.ponderation.connect(my_ponderation) + >>> my_boolean = bool() + >>> op.inputs.boolean.connect(my_boolean) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.scale( + ... field=my_field, + ... ponderation=my_ponderation, + ... boolean=my_boolean, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" - def __init__(self, field=None, ponderation=None, boolean=None, config=None, server=None): - super().__init__(name="scale", config = config, server = server) + def __init__( + self, field=None, ponderation=None, boolean=None, config=None, server=None + ): + super().__init__(name="scale", config=config, server=server) self._inputs = InputsScale(self) self._outputs = OutputsScale(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if ponderation !=None: + if ponderation is not None: self.inputs.ponderation.connect(ponderation) - if boolean !=None: + if boolean is not None: self.inputs.boolean.connect(boolean) @staticmethod def _spec(): - spec = Specification(description="""Scales a field by a constant factor.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "ponderation", type_names=["double","field"], optional=False, document="""Double/Field scoped on overall"""), - 2 : PinSpecification(name = "boolean", type_names=["bool"], optional=True, document="""bool(optional, default false) if set to true, output of scale is mane dimensionless""")}, - map_output_pin_spec={ - 0 : 
PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Scales a field by a constant factor.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="ponderation", + type_names=["double", "field"], + optional=False, + document="""Double/field scoped on overall""", + ), + 2: PinSpecification( + name="boolean", + type_names=["bool"], + optional=True, + document="""Bool(optional, default false) if set to true, + output of scale is mane dimensionless""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "scale") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="scale", server=server) @property def inputs(self): @@ -74,145 +124,139 @@ def inputs(self): Returns -------- - inputs : InputsScale + inputs : InputsScale """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsScale + outputs : OutputsScale """ return super().outputs -#internal name: scale -#scripting name: scale class InputsScale(_Inputs): - """Intermediate class used to connect user inputs to scale operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.scale() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_ponderation = float() - >>> op.inputs.ponderation.connect(my_ponderation) - >>> my_boolean = bool() - >>> op.inputs.boolean.connect(my_boolean) + """Intermediate class used to connect user inputs to + scale operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.scale() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_ponderation = float() + >>> op.inputs.ponderation.connect(my_ponderation) + >>> my_boolean = bool() + >>> op.inputs.boolean.connect(my_boolean) """ + def __init__(self, op: Operator): super().__init__(scale._spec().inputs, op) - self._field = Input(scale._spec().input_pin(0), 0, op, -1) + self._field = Input(scale._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._ponderation = Input(scale._spec().input_pin(1), 1, op, -1) + self._ponderation = Input(scale._spec().input_pin(1), 1, op, -1) self._inputs.append(self._ponderation) - self._boolean = Input(scale._spec().input_pin(2), 2, op, -1) + self._boolean = Input(scale._spec().input_pin(2), 2, op, -1) self._inputs.append(self._boolean) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.scale() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def ponderation(self): - """Allows to connect ponderation input to the operator + """Allows to connect ponderation input to the operator. - - pindoc: Double/Field scoped on overall + Double/field scoped on overall Parameters ---------- - my_ponderation : float, Field, + my_ponderation : float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.scale() >>> op.inputs.ponderation.connect(my_ponderation) - >>> #or + >>> # or >>> op.inputs.ponderation(my_ponderation) - """ return self._ponderation @property def boolean(self): - """Allows to connect boolean input to the operator + """Allows to connect boolean input to the operator. - - pindoc: bool(optional, default false) if set to true, output of scale is mane dimensionless + Bool(optional, default false) if set to true, + output of scale is mane dimensionless Parameters ---------- - my_boolean : bool, + my_boolean : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.scale() >>> op.inputs.boolean.connect(my_boolean) - >>> #or + >>> # or >>> op.inputs.boolean(my_boolean) - """ return self._boolean + class OutputsScale(_Outputs): - """Intermediate class used to get outputs from scale operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.scale() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + scale operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.scale() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(scale._spec().outputs, op) - self._field = Output(scale._spec().output_pin(0), 0, op) + self._field = Output(scale._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.scale() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/scale_by_field.py b/ansys/dpf/core/operators/math/scale_by_field.py index 07c3365d992..03ea4b7946a 100644 --- a/ansys/dpf/core/operators/math/scale_by_field.py +++ b/ansys/dpf/core/operators/math/scale_by_field.py @@ -1,66 +1,111 @@ """ scale_by_field -============== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class scale_by_field(Operator): - """Scales a field (in 0) by a scalar field (in 1). If one field's scoping has 'overall' location, then these field's values are applied on the entire other field. 
- - available inputs: - - fieldA (Field, FieldsContainer) - - fieldB (Field, FieldsContainer) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.scale_by_field() - - >>> # Make input connections - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.scale_by_field(fieldA=my_fieldA,fieldB=my_fieldB) + """Scales a field (in 0) by a scalar field (in 1). If one field's scoping + has 'overall' location, then these field's values are applied on + the entire other field. + + Parameters + ---------- + fieldA : Field or FieldsContainer + Field or fields container with only one field + is expected + fieldB : Field or FieldsContainer + Field or fields container with only one field + is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.scale_by_field() + + >>> # Make input connections + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.scale_by_field( + ... fieldA=my_fieldA, + ... fieldB=my_fieldB, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, fieldA=None, fieldB=None, config=None, server=None): - super().__init__(name="scale_by_field", config = config, server = server) + super().__init__(name="scale_by_field", config=config, server=server) self._inputs = InputsScaleByField(self) self._outputs = OutputsScaleByField(self) - if fieldA !=None: + if fieldA is not None: self.inputs.fieldA.connect(fieldA) - if fieldB !=None: + if fieldB is not None: self.inputs.fieldB.connect(fieldB) @staticmethod def _spec(): - spec = Specification(description="""Scales a field (in 0) by a scalar field (in 1). If one field's scoping has 'overall' location, then these field's values are applied on the entire other field.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fieldA", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "fieldB", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Scales a field (in 0) by a scalar field (in 1). 
If one field's scoping + has 'overall' location, then these field's values are + applied on the entire other field.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fieldA", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="fieldB", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "scale_by_field") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="scale_by_field", server=server) @property def inputs(self): @@ -68,119 +113,115 @@ def inputs(self): Returns -------- - inputs : InputsScaleByField + inputs : InputsScaleByField """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsScaleByField + outputs : OutputsScaleByField """ return super().outputs -#internal name: scale_by_field -#scripting name: scale_by_field class InputsScaleByField(_Inputs): - """Intermediate class used to connect user inputs to scale_by_field operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.scale_by_field() - >>> my_fieldA = dpf.Field() - >>> op.inputs.fieldA.connect(my_fieldA) - >>> my_fieldB = dpf.Field() - >>> op.inputs.fieldB.connect(my_fieldB) + """Intermediate class used to connect user inputs to + scale_by_field operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.scale_by_field() + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) """ + def __init__(self, op: Operator): super().__init__(scale_by_field._spec().inputs, op) - self._fieldA = Input(scale_by_field._spec().input_pin(0), 0, op, -1) + self._fieldA = Input(scale_by_field._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fieldA) - self._fieldB = Input(scale_by_field._spec().input_pin(1), 1, op, -1) + self._fieldB = Input(scale_by_field._spec().input_pin(1), 1, op, -1) self._inputs.append(self._fieldB) @property def fieldA(self): - """Allows to connect fieldA input to the operator + """Allows to connect fieldA input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldA : Field, FieldsContainer, + my_fieldA : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.scale_by_field() >>> op.inputs.fieldA.connect(my_fieldA) - >>> #or + >>> # or >>> op.inputs.fieldA(my_fieldA) - """ return self._fieldA @property def fieldB(self): - """Allows to connect fieldB input to the operator + """Allows to connect fieldB input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldB : Field, FieldsContainer, + my_fieldB : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.scale_by_field() >>> op.inputs.fieldB.connect(my_fieldB) - >>> #or + >>> # or >>> op.inputs.fieldB(my_fieldB) - """ return self._fieldB + class OutputsScaleByField(_Outputs): - """Intermediate class used to get outputs from scale_by_field operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.scale_by_field() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + scale_by_field operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.scale_by_field() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(scale_by_field._spec().outputs, op) - self._field = Output(scale_by_field._spec().output_pin(0), 0, op) + self._field = Output(scale_by_field._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.scale_by_field() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/scale_by_field_fc.py b/ansys/dpf/core/operators/math/scale_by_field_fc.py index b51ba440703..ace2ab231f4 100644 --- a/ansys/dpf/core/operators/math/scale_by_field_fc.py +++ b/ansys/dpf/core/operators/math/scale_by_field_fc.py @@ -1,66 +1,117 @@ """ scale_by_field_fc -================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class scale_by_field_fc(Operator): - """Scales a field (in 0) by a scalar field (in 1). If one field's scoping has 'overall' location, then these field's values are applied on the entire other field. 
- - available inputs: - - field_or_fields_container_A (FieldsContainer) - - field_or_fields_container_B (FieldsContainer) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.scale_by_field_fc() - - >>> # Make input connections - >>> my_field_or_fields_container_A = dpf.FieldsContainer() - >>> op.inputs.field_or_fields_container_A.connect(my_field_or_fields_container_A) - >>> my_field_or_fields_container_B = dpf.FieldsContainer() - >>> op.inputs.field_or_fields_container_B.connect(my_field_or_fields_container_B) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.scale_by_field_fc(field_or_fields_container_A=my_field_or_fields_container_A,field_or_fields_container_B=my_field_or_fields_container_B) + """Scales a field (in 0) by a scalar field (in 1). If one field's scoping + has 'overall' location, then these field's values are applied on + the entire other field. + + Parameters + ---------- + field_or_fields_container_A : Field or FieldsContainer + Field or fields container with only one field + is expected + field_or_fields_container_B : Field or FieldsContainer + Field or fields container with only one field + is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.scale_by_field_fc() + + >>> # Make input connections + >>> my_field_or_fields_container_A = dpf.Field() + >>> op.inputs.field_or_fields_container_A.connect(my_field_or_fields_container_A) + >>> my_field_or_fields_container_B = dpf.Field() + >>> op.inputs.field_or_fields_container_B.connect(my_field_or_fields_container_B) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.scale_by_field_fc( + ... field_or_fields_container_A=my_field_or_fields_container_A, + ... 
field_or_fields_container_B=my_field_or_fields_container_B, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, field_or_fields_container_A=None, field_or_fields_container_B=None, config=None, server=None): - super().__init__(name="scale_by_field_fc", config = config, server = server) + def __init__( + self, + field_or_fields_container_A=None, + field_or_fields_container_B=None, + config=None, + server=None, + ): + super().__init__(name="scale_by_field_fc", config=config, server=server) self._inputs = InputsScaleByFieldFc(self) self._outputs = OutputsScaleByFieldFc(self) - if field_or_fields_container_A !=None: + if field_or_fields_container_A is not None: self.inputs.field_or_fields_container_A.connect(field_or_fields_container_A) - if field_or_fields_container_B !=None: + if field_or_fields_container_B is not None: self.inputs.field_or_fields_container_B.connect(field_or_fields_container_B) @staticmethod def _spec(): - spec = Specification(description="""Scales a field (in 0) by a scalar field (in 1). If one field's scoping has 'overall' location, then these field's values are applied on the entire other field.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field_or_fields_container_A", type_names=["fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "field_or_fields_container_B", type_names=["fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Scales a field (in 0) by a scalar field (in 1). 
If one field's scoping + has 'overall' location, then these field's values are + applied on the entire other field.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field_or_fields_container_A", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="field_or_fields_container_B", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "scale_by_field_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="scale_by_field_fc", server=server) @property def inputs(self): @@ -68,119 +119,119 @@ def inputs(self): Returns -------- - inputs : InputsScaleByFieldFc + inputs : InputsScaleByFieldFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsScaleByFieldFc + outputs : OutputsScaleByFieldFc """ return super().outputs -#internal name: scale_by_field_fc -#scripting name: scale_by_field_fc class InputsScaleByFieldFc(_Inputs): - """Intermediate class used to connect user inputs to scale_by_field_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.scale_by_field_fc() - >>> my_field_or_fields_container_A = dpf.FieldsContainer() - >>> op.inputs.field_or_fields_container_A.connect(my_field_or_fields_container_A) - >>> my_field_or_fields_container_B = dpf.FieldsContainer() - >>> op.inputs.field_or_fields_container_B.connect(my_field_or_fields_container_B) + """Intermediate class used to connect user inputs to + scale_by_field_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.scale_by_field_fc() + >>> my_field_or_fields_container_A = dpf.Field() + >>> op.inputs.field_or_fields_container_A.connect(my_field_or_fields_container_A) + >>> my_field_or_fields_container_B = dpf.Field() + >>> op.inputs.field_or_fields_container_B.connect(my_field_or_fields_container_B) """ + def __init__(self, op: Operator): super().__init__(scale_by_field_fc._spec().inputs, op) - self._field_or_fields_container_A = Input(scale_by_field_fc._spec().input_pin(0), 0, op, -1) + self._field_or_fields_container_A = Input( + scale_by_field_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field_or_fields_container_A) - self._field_or_fields_container_B = Input(scale_by_field_fc._spec().input_pin(1), 1, op, -1) + self._field_or_fields_container_B = Input( + scale_by_field_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._field_or_fields_container_B) @property def field_or_fields_container_A(self): - """Allows to connect field_or_fields_container_A input to the operator + """Allows to connect field_or_fields_container_A input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field_or_fields_container_A : FieldsContainer, + my_field_or_fields_container_A : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.scale_by_field_fc() >>> op.inputs.field_or_fields_container_A.connect(my_field_or_fields_container_A) - >>> #or + >>> # or >>> op.inputs.field_or_fields_container_A(my_field_or_fields_container_A) - """ return self._field_or_fields_container_A @property def field_or_fields_container_B(self): - """Allows to connect field_or_fields_container_B input to the operator + """Allows to connect field_or_fields_container_B input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field_or_fields_container_B : FieldsContainer, + my_field_or_fields_container_B : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.scale_by_field_fc() >>> op.inputs.field_or_fields_container_B.connect(my_field_or_fields_container_B) - >>> #or + >>> # or >>> op.inputs.field_or_fields_container_B(my_field_or_fields_container_B) - """ return self._field_or_fields_container_B + class OutputsScaleByFieldFc(_Outputs): - """Intermediate class used to get outputs from scale_by_field_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.scale_by_field_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + scale_by_field_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.scale_by_field_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(scale_by_field_fc._spec().outputs, op) - self._fields_container = Output(scale_by_field_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(scale_by_field_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.scale_by_field_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/scale_fc.py b/ansys/dpf/core/operators/math/scale_fc.py index d699b8defc3..73e3d641a94 100644 --- a/ansys/dpf/core/operators/math/scale_fc.py +++ b/ansys/dpf/core/operators/math/scale_fc.py @@ -1,72 +1,127 @@ """ scale_fc -======== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class scale_fc(Operator): """Scales a field by a constant factor. - available inputs: - - fields_container (FieldsContainer) - - ponderation (float, Field) - - boolean (bool) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.scale_fc() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_ponderation = float() - >>> op.inputs.ponderation.connect(my_ponderation) - >>> my_boolean = bool() - >>> op.inputs.boolean.connect(my_boolean) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.scale_fc(fields_container=my_fields_container,ponderation=my_ponderation,boolean=my_boolean) + Parameters + ---------- + fields_container : FieldsContainer + Field or fields container with only one field + is expected + ponderation : float or Field + Double/field scoped on overall + boolean : 
bool, optional + Bool(optional, default false) if set to true, + output of scale is mane dimensionless + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.scale_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_ponderation = float() + >>> op.inputs.ponderation.connect(my_ponderation) + >>> my_boolean = bool() + >>> op.inputs.boolean.connect(my_boolean) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.scale_fc( + ... fields_container=my_fields_container, + ... ponderation=my_ponderation, + ... boolean=my_boolean, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, ponderation=None, boolean=None, config=None, server=None): - super().__init__(name="scale_fc", config = config, server = server) + def __init__( + self, + fields_container=None, + ponderation=None, + boolean=None, + config=None, + server=None, + ): + super().__init__(name="scale_fc", config=config, server=server) self._inputs = InputsScaleFc(self) self._outputs = OutputsScaleFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if ponderation !=None: + if ponderation is not None: self.inputs.ponderation.connect(ponderation) - if boolean !=None: + if boolean is not None: self.inputs.boolean.connect(boolean) @staticmethod def _spec(): - spec = Specification(description="""Scales a field by a constant factor.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : 
PinSpecification(name = "ponderation", type_names=["double","field"], optional=False, document="""Double/Field scoped on overall"""), - 2 : PinSpecification(name = "boolean", type_names=["bool"], optional=True, document="""bool(optional, default false) if set to true, output of scale is mane dimensionless""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Scales a field by a constant factor.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="ponderation", + type_names=["double", "field"], + optional=False, + document="""Double/field scoped on overall""", + ), + 2: PinSpecification( + name="boolean", + type_names=["bool"], + optional=True, + document="""Bool(optional, default false) if set to true, + output of scale is mane dimensionless""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "scale_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="scale_fc", server=server) @property def inputs(self): @@ -74,145 +129,139 @@ def inputs(self): Returns -------- - inputs : InputsScaleFc + inputs : InputsScaleFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsScaleFc + outputs : OutputsScaleFc """ return super().outputs -#internal name: scale_fc -#scripting name: scale_fc class InputsScaleFc(_Inputs): - """Intermediate class used to connect user inputs to scale_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.scale_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_ponderation = float() - >>> op.inputs.ponderation.connect(my_ponderation) - >>> my_boolean = bool() - >>> op.inputs.boolean.connect(my_boolean) + """Intermediate class used to connect user inputs to + scale_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.scale_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_ponderation = float() + >>> op.inputs.ponderation.connect(my_ponderation) + >>> my_boolean = bool() + >>> op.inputs.boolean.connect(my_boolean) """ + def __init__(self, op: Operator): super().__init__(scale_fc._spec().inputs, op) - self._fields_container = Input(scale_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(scale_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) - self._ponderation = Input(scale_fc._spec().input_pin(1), 1, op, -1) + self._ponderation = Input(scale_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._ponderation) - self._boolean = Input(scale_fc._spec().input_pin(2), 2, op, -1) + self._boolean = Input(scale_fc._spec().input_pin(2), 2, op, -1) self._inputs.append(self._boolean) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.scale_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def ponderation(self): - """Allows to connect ponderation input to the operator + """Allows to connect ponderation input to the operator. 
- - pindoc: Double/Field scoped on overall + Double/field scoped on overall Parameters ---------- - my_ponderation : float, Field, + my_ponderation : float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.scale_fc() >>> op.inputs.ponderation.connect(my_ponderation) - >>> #or + >>> # or >>> op.inputs.ponderation(my_ponderation) - """ return self._ponderation @property def boolean(self): - """Allows to connect boolean input to the operator + """Allows to connect boolean input to the operator. - - pindoc: bool(optional, default false) if set to true, output of scale is mane dimensionless + Bool(optional, default false) if set to true, + output of scale is mane dimensionless Parameters ---------- - my_boolean : bool, + my_boolean : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.scale_fc() >>> op.inputs.boolean.connect(my_boolean) - >>> #or + >>> # or >>> op.inputs.boolean(my_boolean) - """ return self._boolean + class OutputsScaleFc(_Outputs): - """Intermediate class used to get outputs from scale_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.scale_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + scale_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.scale_fc() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(scale_fc._spec().outputs, op) - self._fields_container = Output(scale_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(scale_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.scale_fc() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/sin.py b/ansys/dpf/core/operators/math/sin.py index b14b9b09399..3ee0f8baab0 100644 --- a/ansys/dpf/core/operators/math/sin.py +++ b/ansys/dpf/core/operators/math/sin.py @@ -1,60 +1,89 @@ """ sin -=== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class sin(Operator): """Computes element-wise sin(field[i]). 
- available inputs: - - field (Field) + Parameters + ---------- + field : Field + - available outputs: - - field (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.sin() - >>> # Instantiate operator - >>> op = dpf.operators.math.sin() + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.sin( + ... field=my_field, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.sin(field=my_field) + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, field=None, config=None, server=None): - super().__init__(name="sin", config = config, server = server) + super().__init__(name="sin", config=config, server=server) self._inputs = InputsSin(self) self._outputs = OutputsSin(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) @staticmethod def _spec(): - spec = Specification(description="""Computes element-wise sin(field[i]).""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Computes element-wise sin(field[i]).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) 
return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "sin") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="sin", server=server) @property def inputs(self): @@ -62,91 +91,87 @@ def inputs(self): Returns -------- - inputs : InputsSin + inputs : InputsSin """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsSin + outputs : OutputsSin """ return super().outputs -#internal name: sin -#scripting name: sin class InputsSin(_Inputs): - """Intermediate class used to connect user inputs to sin operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.sin() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + """Intermediate class used to connect user inputs to + sin operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.sin() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) """ + def __init__(self, op: Operator): super().__init__(sin._spec().inputs, op) - self._field = Input(sin._spec().input_pin(0), 0, op, -1) + self._field = Input(sin._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. 
Parameters ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.sin() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field + class OutputsSin(_Outputs): - """Intermediate class used to get outputs from sin operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.sin() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + sin operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.sin() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(sin._spec().outputs, op) - self._field = Output(sin._spec().output_pin(0), 0, op) + self._field = Output(sin._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.sin() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/sin_fc.py b/ansys/dpf/core/operators/math/sin_fc.py index 9a33e7045e5..d17c5a54c5d 100644 --- a/ansys/dpf/core/operators/math/sin_fc.py +++ b/ansys/dpf/core/operators/math/sin_fc.py @@ -1,60 +1,89 @@ """ sin_fc -====== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class sin_fc(Operator): """Computes element-wise sin(field[i]). - available inputs: - - fields_container (FieldsContainer) + Parameters + ---------- + fields_container : FieldsContainer + - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.sin_fc() - >>> # Instantiate operator - >>> op = dpf.operators.math.sin_fc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.sin_fc( + ... fields_container=my_fields_container, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.sin_fc(fields_container=my_fields_container) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="sin_fc", config = config, server = server) + super().__init__(name="sin_fc", config=config, server=server) self._inputs = InputsSinFc(self) self._outputs = OutputsSinFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Computes element-wise sin(field[i]).""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Computes element-wise sin(field[i]).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "sin_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="sin_fc", server=server) @property def inputs(self): @@ -62,91 +91,87 @@ def inputs(self): Returns -------- - inputs : InputsSinFc + inputs : InputsSinFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsSinFc + outputs : OutputsSinFc """ return super().outputs -#internal name: sin_fc -#scripting name: sin_fc class InputsSinFc(_Inputs): - """Intermediate class used to connect user inputs to sin_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.sin_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + sin_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.sin_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(sin_fc._spec().inputs, op) - self._fields_container = Input(sin_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(sin_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.sin_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsSinFc(_Outputs): - """Intermediate class used to get outputs from sin_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.sin_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + sin_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.sin_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(sin_fc._spec().outputs, op) - self._fields_container = Output(sin_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(sin_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.sin_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/sqr.py b/ansys/dpf/core/operators/math/sqr.py index e7f62cb086e..88b694db375 100644 --- a/ansys/dpf/core/operators/math/sqr.py +++ b/ansys/dpf/core/operators/math/sqr.py @@ -1,60 +1,92 @@ """ sqr -=== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class sqr(Operator): """Computes element-wise field[i]^2. - available inputs: - - field (Field, FieldsContainer) + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + - available outputs: - - field (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.sqr() - >>> # Instantiate operator - >>> op = dpf.operators.math.sqr() + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.sqr( + ... field=my_field, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.sqr(field=my_field) + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, field=None, config=None, server=None): - super().__init__(name="sqr", config = config, server = server) + super().__init__(name="sqr", config=config, server=server) self._inputs = InputsSqr(self) self._outputs = OutputsSqr(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) @staticmethod def _spec(): - spec = Specification(description="""Computes element-wise field[i]^2.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Computes element-wise field[i]^2.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "sqr") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. 
When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="sqr", server=server) @property def inputs(self): @@ -62,93 +94,90 @@ def inputs(self): Returns -------- - inputs : InputsSqr + inputs : InputsSqr """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsSqr + outputs : OutputsSqr """ return super().outputs -#internal name: sqr -#scripting name: sqr class InputsSqr(_Inputs): - """Intermediate class used to connect user inputs to sqr operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.sqr() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + """Intermediate class used to connect user inputs to + sqr operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.sqr() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) """ + def __init__(self, op: Operator): super().__init__(sqr._spec().inputs, op) - self._field = Input(sqr._spec().input_pin(0), 0, op, -1) + self._field = Input(sqr._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.sqr() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field + class OutputsSqr(_Outputs): - """Intermediate class used to get outputs from sqr operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.sqr() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + sqr operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.sqr() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(sqr._spec().outputs, op) - self._field = Output(sqr._spec().output_pin(0), 0, op) + self._field = Output(sqr._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.sqr() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/sqr_fc.py b/ansys/dpf/core/operators/math/sqr_fc.py index 32089b7ee18..b3485d166ee 100644 --- a/ansys/dpf/core/operators/math/sqr_fc.py +++ b/ansys/dpf/core/operators/math/sqr_fc.py @@ -1,60 +1,92 @@ """ sqr_fc -====== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class sqr_fc(Operator): """Computes element-wise field[i]^2. - available inputs: - - fields_container (FieldsContainer) + Parameters + ---------- + fields_container : FieldsContainer + Field or fields container with only one field + is expected + - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.sqr_fc() - >>> # Instantiate operator - >>> op = dpf.operators.math.sqr_fc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.sqr_fc( + ... fields_container=my_fields_container, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.sqr_fc(fields_container=my_fields_container) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="sqr_fc", config = config, server = server) + super().__init__(name="sqr_fc", config=config, server=server) self._inputs = InputsSqrFc(self) self._outputs = OutputsSqrFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Computes element-wise field[i]^2.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Computes element-wise field[i]^2.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "sqr_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="sqr_fc", server=server) @property def inputs(self): @@ -62,93 +94,90 @@ def inputs(self): Returns -------- - inputs : InputsSqrFc + inputs : InputsSqrFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsSqrFc + outputs : OutputsSqrFc """ return super().outputs -#internal name: sqr_fc -#scripting name: sqr_fc class InputsSqrFc(_Inputs): - """Intermediate class used to connect user inputs to sqr_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.sqr_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + sqr_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.sqr_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(sqr_fc._spec().inputs, op) - self._fields_container = Input(sqr_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(sqr_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.sqr_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsSqrFc(_Outputs): - """Intermediate class used to get outputs from sqr_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.sqr_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + sqr_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.sqr_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(sqr_fc._spec().outputs, op) - self._fields_container = Output(sqr_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(sqr_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.sqr_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/sqrt.py b/ansys/dpf/core/operators/math/sqrt.py index 85dacb85e5c..95e588c7b25 100644 --- a/ansys/dpf/core/operators/math/sqrt.py +++ b/ansys/dpf/core/operators/math/sqrt.py @@ -1,60 +1,92 @@ """ sqrt -==== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class sqrt(Operator): """Computes element-wise sqrt(field1). - available inputs: - - field (Field, FieldsContainer) + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + - available outputs: - - field (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.sqrt() - >>> # Instantiate operator - >>> op = dpf.operators.math.sqrt() + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.sqrt( + ... field=my_field, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.sqrt(field=my_field) + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, field=None, config=None, server=None): - super().__init__(name="sqrt", config = config, server = server) + super().__init__(name="sqrt", config=config, server=server) self._inputs = InputsSqrt(self) self._outputs = OutputsSqrt(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) @staticmethod def _spec(): - spec = Specification(description="""Computes element-wise sqrt(field1).""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Computes element-wise sqrt(field1).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "sqrt") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. 
When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="sqrt", server=server) @property def inputs(self): @@ -62,93 +94,90 @@ def inputs(self): Returns -------- - inputs : InputsSqrt + inputs : InputsSqrt """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsSqrt + outputs : OutputsSqrt """ return super().outputs -#internal name: sqrt -#scripting name: sqrt class InputsSqrt(_Inputs): - """Intermediate class used to connect user inputs to sqrt operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.sqrt() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + """Intermediate class used to connect user inputs to + sqrt operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.sqrt() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) """ + def __init__(self, op: Operator): super().__init__(sqrt._spec().inputs, op) - self._field = Input(sqrt._spec().input_pin(0), 0, op, -1) + self._field = Input(sqrt._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.sqrt() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field + class OutputsSqrt(_Outputs): - """Intermediate class used to get outputs from sqrt operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.sqrt() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + sqrt operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.sqrt() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(sqrt._spec().outputs, op) - self._field = Output(sqrt._spec().output_pin(0), 0, op) + self._field = Output(sqrt._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.sqrt() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/sqrt_fc.py b/ansys/dpf/core/operators/math/sqrt_fc.py index 3f9273a5d46..6ee52ec6670 100644 --- a/ansys/dpf/core/operators/math/sqrt_fc.py +++ b/ansys/dpf/core/operators/math/sqrt_fc.py @@ -1,60 +1,92 @@ """ sqrt_fc -======= +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class sqrt_fc(Operator): """Computes element-wise sqrt(field1). - available inputs: - - fields_container (FieldsContainer) + Parameters + ---------- + fields_container : FieldsContainer + Field or fields container with only one field + is expected + - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.sqrt_fc() - >>> # Instantiate operator - >>> op = dpf.operators.math.sqrt_fc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.sqrt_fc( + ... fields_container=my_fields_container, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.sqrt_fc(fields_container=my_fields_container) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="sqrt_fc", config = config, server = server) + super().__init__(name="sqrt_fc", config=config, server=server) self._inputs = InputsSqrtFc(self) self._outputs = OutputsSqrtFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Computes element-wise sqrt(field1).""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Computes element-wise sqrt(field1).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "sqrt_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="sqrt_fc", server=server) @property def inputs(self): @@ -62,93 +94,90 @@ def inputs(self): Returns -------- - inputs : InputsSqrtFc + inputs : InputsSqrtFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsSqrtFc + outputs : OutputsSqrtFc """ return super().outputs -#internal name: sqrt_fc -#scripting name: sqrt_fc class InputsSqrtFc(_Inputs): - """Intermediate class used to connect user inputs to sqrt_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.sqrt_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + sqrt_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.sqrt_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(sqrt_fc._spec().inputs, op) - self._fields_container = Input(sqrt_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(sqrt_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.sqrt_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsSqrtFc(_Outputs): - """Intermediate class used to get outputs from sqrt_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.sqrt_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + sqrt_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.sqrt_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(sqrt_fc._spec().outputs, op) - self._fields_container = Output(sqrt_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(sqrt_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.sqrt_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/svd.py b/ansys/dpf/core/operators/math/svd.py index f95a9a6de11..7a5310a466e 100644 --- a/ansys/dpf/core/operators/math/svd.py +++ b/ansys/dpf/core/operators/math/svd.py @@ -1,60 +1,90 @@ """ svd -=== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Math plugin, from "math" category -""" class svd(Operator): """computes the complex matrix svd at a given fields container. - available inputs: - - fields_container (FieldsContainer) + Parameters + ---------- + fields_container : FieldsContainer + Fields_container + - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.svd() - >>> # Instantiate operator - >>> op = dpf.operators.math.svd() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.svd( + ... fields_container=my_fields_container, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.svd(fields_container=my_fields_container) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="svdOp", config = config, server = server) + super().__init__(name="svdOp", config=config, server=server) self._inputs = InputsSvd(self) self._outputs = OutputsSvd(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""computes the complex matrix svd at a given fields container.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""fields_container""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """computes the complex matrix svd at a given fields container.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Fields_container""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "svdOp") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="svdOp", server=server) @property def inputs(self): @@ -62,93 +92,89 @@ def inputs(self): Returns -------- - inputs : InputsSvd + inputs : InputsSvd """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsSvd + outputs : OutputsSvd """ return super().outputs -#internal name: svdOp -#scripting name: svd class InputsSvd(_Inputs): - """Intermediate class used to connect user inputs to svd operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.svd() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + svd operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.svd() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(svd._spec().inputs, op) - self._fields_container = Input(svd._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(svd._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: fields_container + Fields_container Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.svd() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsSvd(_Outputs): - """Intermediate class used to get outputs from svd operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.svd() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + svd operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.svd() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(svd._spec().outputs, op) - self._fields_container = Output(svd._spec().output_pin(0), 0, op) + self._fields_container = Output(svd._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.svd() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/sweeping_phase.py b/ansys/dpf/core/operators/math/sweeping_phase.py index 4a3b907ca3f..97d5fecf0fb 100644 --- a/ansys/dpf/core/operators/math/sweeping_phase.py +++ b/ansys/dpf/core/operators/math/sweeping_phase.py @@ -1,90 +1,175 @@ """ sweeping_phase -============== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class sweeping_phase(Operator): - """Shift the phase of a real and an imaginary fields (in 0 and 1) of a given angle (in 3) of unit (in 4). 
- - available inputs: - - real_field (Field, FieldsContainer) - - imaginary_field (Field, FieldsContainer) - - angle (float) - - unit_name (str) - - abs_value (bool) - - imaginary_part_null (bool) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.sweeping_phase() - - >>> # Make input connections - >>> my_real_field = dpf.Field() - >>> op.inputs.real_field.connect(my_real_field) - >>> my_imaginary_field = dpf.Field() - >>> op.inputs.imaginary_field.connect(my_imaginary_field) - >>> my_angle = float() - >>> op.inputs.angle.connect(my_angle) - >>> my_unit_name = str() - >>> op.inputs.unit_name.connect(my_unit_name) - >>> my_abs_value = bool() - >>> op.inputs.abs_value.connect(my_abs_value) - >>> my_imaginary_part_null = bool() - >>> op.inputs.imaginary_part_null.connect(my_imaginary_part_null) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.sweeping_phase(real_field=my_real_field,imaginary_field=my_imaginary_field,angle=my_angle,unit_name=my_unit_name,abs_value=my_abs_value,imaginary_part_null=my_imaginary_part_null) - - >>> # Get output data - >>> result_field = op.outputs.field()""" - def __init__(self, real_field=None, imaginary_field=None, angle=None, unit_name=None, abs_value=None, imaginary_part_null=None, config=None, server=None): - super().__init__(name="sweeping_phase", config = config, server = server) + """Shift the phase of a real and an imaginary fields (in 0 and 1) of a + given angle (in 3) of unit (in 4). 
+ + Parameters + ---------- + real_field : Field or FieldsContainer + Field or fields container with only one field + is expected + imaginary_field : Field or FieldsContainer + Field or fields container with only one field + is expected + angle : float + unit_name : str + String unit + abs_value : bool + imaginary_part_null : bool + If the imaginary part field is empty and this + pin is true, then the imaginary part + is supposed to be 0 (default is + false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.sweeping_phase() + + >>> # Make input connections + >>> my_real_field = dpf.Field() + >>> op.inputs.real_field.connect(my_real_field) + >>> my_imaginary_field = dpf.Field() + >>> op.inputs.imaginary_field.connect(my_imaginary_field) + >>> my_angle = float() + >>> op.inputs.angle.connect(my_angle) + >>> my_unit_name = str() + >>> op.inputs.unit_name.connect(my_unit_name) + >>> my_abs_value = bool() + >>> op.inputs.abs_value.connect(my_abs_value) + >>> my_imaginary_part_null = bool() + >>> op.inputs.imaginary_part_null.connect(my_imaginary_part_null) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.sweeping_phase( + ... real_field=my_real_field, + ... imaginary_field=my_imaginary_field, + ... angle=my_angle, + ... unit_name=my_unit_name, + ... abs_value=my_abs_value, + ... imaginary_part_null=my_imaginary_part_null, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__( + self, + real_field=None, + imaginary_field=None, + angle=None, + unit_name=None, + abs_value=None, + imaginary_part_null=None, + config=None, + server=None, + ): + super().__init__(name="sweeping_phase", config=config, server=server) self._inputs = InputsSweepingPhase(self) self._outputs = OutputsSweepingPhase(self) - if real_field !=None: + if real_field is not None: self.inputs.real_field.connect(real_field) - if imaginary_field !=None: + if imaginary_field is not None: self.inputs.imaginary_field.connect(imaginary_field) - if angle !=None: + if angle is not None: self.inputs.angle.connect(angle) - if unit_name !=None: + if unit_name is not None: self.inputs.unit_name.connect(unit_name) - if abs_value !=None: + if abs_value is not None: self.inputs.abs_value.connect(abs_value) - if imaginary_part_null !=None: + if imaginary_part_null is not None: self.inputs.imaginary_part_null.connect(imaginary_part_null) @staticmethod def _spec(): - spec = Specification(description="""Shift the phase of a real and an imaginary fields (in 0 and 1) of a given angle (in 3) of unit (in 4).""", - map_input_pin_spec={ - 0 : PinSpecification(name = "real_field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "imaginary_field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 2 : PinSpecification(name = "angle", type_names=["double"], optional=False, document=""""""), - 3 : PinSpecification(name = "unit_name", type_names=["string"], optional=False, document="""String Unit"""), - 4 : PinSpecification(name = "abs_value", type_names=["bool"], optional=False, document=""""""), - 5 : PinSpecification(name = "imaginary_part_null", type_names=["bool"], optional=False, document="""if the imaginary part 
field is empty and this pin is true, then the imaginary part is supposed to be 0 (default is false)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Shift the phase of a real and an imaginary fields (in 0 and 1) of a + given angle (in 3) of unit (in 4).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="real_field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="imaginary_field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 2: PinSpecification( + name="angle", + type_names=["double"], + optional=False, + document="""""", + ), + 3: PinSpecification( + name="unit_name", + type_names=["string"], + optional=False, + document="""String unit""", + ), + 4: PinSpecification( + name="abs_value", + type_names=["bool"], + optional=False, + document="""""", + ), + 5: PinSpecification( + name="imaginary_part_null", + type_names=["bool"], + optional=False, + document="""If the imaginary part field is empty and this + pin is true, then the imaginary part + is supposed to be 0 (default is + false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "sweeping_phase") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="sweeping_phase", server=server) @property def inputs(self): @@ -92,219 +177,212 @@ def inputs(self): Returns -------- - inputs : InputsSweepingPhase + inputs : InputsSweepingPhase """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsSweepingPhase + outputs : OutputsSweepingPhase """ return super().outputs -#internal name: sweeping_phase -#scripting name: sweeping_phase class InputsSweepingPhase(_Inputs): - """Intermediate class used to connect user inputs to sweeping_phase operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.sweeping_phase() - >>> my_real_field = dpf.Field() - >>> op.inputs.real_field.connect(my_real_field) - >>> my_imaginary_field = dpf.Field() - >>> op.inputs.imaginary_field.connect(my_imaginary_field) - >>> my_angle = float() - >>> op.inputs.angle.connect(my_angle) - >>> my_unit_name = str() - >>> op.inputs.unit_name.connect(my_unit_name) - >>> my_abs_value = bool() - >>> op.inputs.abs_value.connect(my_abs_value) - >>> my_imaginary_part_null = bool() - >>> op.inputs.imaginary_part_null.connect(my_imaginary_part_null) + """Intermediate class used to connect user inputs to + sweeping_phase operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.sweeping_phase() + >>> my_real_field = dpf.Field() + >>> op.inputs.real_field.connect(my_real_field) + >>> my_imaginary_field = dpf.Field() + >>> op.inputs.imaginary_field.connect(my_imaginary_field) + >>> my_angle = float() + >>> op.inputs.angle.connect(my_angle) + >>> my_unit_name = str() + >>> op.inputs.unit_name.connect(my_unit_name) + >>> my_abs_value = bool() + >>> op.inputs.abs_value.connect(my_abs_value) + >>> my_imaginary_part_null = bool() + >>> op.inputs.imaginary_part_null.connect(my_imaginary_part_null) """ + def __init__(self, op: Operator): super().__init__(sweeping_phase._spec().inputs, op) - self._real_field = Input(sweeping_phase._spec().input_pin(0), 0, op, -1) + self._real_field = Input(sweeping_phase._spec().input_pin(0), 0, op, -1) self._inputs.append(self._real_field) - self._imaginary_field = Input(sweeping_phase._spec().input_pin(1), 1, op, -1) + self._imaginary_field = Input(sweeping_phase._spec().input_pin(1), 1, op, -1) self._inputs.append(self._imaginary_field) - self._angle = Input(sweeping_phase._spec().input_pin(2), 2, op, -1) + self._angle = Input(sweeping_phase._spec().input_pin(2), 2, op, -1) self._inputs.append(self._angle) - self._unit_name = Input(sweeping_phase._spec().input_pin(3), 3, op, -1) + self._unit_name = Input(sweeping_phase._spec().input_pin(3), 3, op, -1) self._inputs.append(self._unit_name) - self._abs_value = Input(sweeping_phase._spec().input_pin(4), 4, op, -1) + self._abs_value = Input(sweeping_phase._spec().input_pin(4), 4, op, -1) self._inputs.append(self._abs_value) - self._imaginary_part_null = Input(sweeping_phase._spec().input_pin(5), 5, op, -1) + self._imaginary_part_null = Input( + sweeping_phase._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._imaginary_part_null) @property def real_field(self): - """Allows to connect real_field input to the operator + """Allows to connect real_field input to the 
operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_real_field : Field, FieldsContainer, + my_real_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.sweeping_phase() >>> op.inputs.real_field.connect(my_real_field) - >>> #or + >>> # or >>> op.inputs.real_field(my_real_field) - """ return self._real_field @property def imaginary_field(self): - """Allows to connect imaginary_field input to the operator + """Allows to connect imaginary_field input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_imaginary_field : Field, FieldsContainer, + my_imaginary_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.sweeping_phase() >>> op.inputs.imaginary_field.connect(my_imaginary_field) - >>> #or + >>> # or >>> op.inputs.imaginary_field(my_imaginary_field) - """ return self._imaginary_field @property def angle(self): - """Allows to connect angle input to the operator + """Allows to connect angle input to the operator. Parameters ---------- - my_angle : float, + my_angle : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.sweeping_phase() >>> op.inputs.angle.connect(my_angle) - >>> #or + >>> # or >>> op.inputs.angle(my_angle) - """ return self._angle @property def unit_name(self): - """Allows to connect unit_name input to the operator + """Allows to connect unit_name input to the operator. 
- - pindoc: String Unit + String unit Parameters ---------- - my_unit_name : str, + my_unit_name : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.sweeping_phase() >>> op.inputs.unit_name.connect(my_unit_name) - >>> #or + >>> # or >>> op.inputs.unit_name(my_unit_name) - """ return self._unit_name @property def abs_value(self): - """Allows to connect abs_value input to the operator + """Allows to connect abs_value input to the operator. Parameters ---------- - my_abs_value : bool, + my_abs_value : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.sweeping_phase() >>> op.inputs.abs_value.connect(my_abs_value) - >>> #or + >>> # or >>> op.inputs.abs_value(my_abs_value) - """ return self._abs_value @property def imaginary_part_null(self): - """Allows to connect imaginary_part_null input to the operator + """Allows to connect imaginary_part_null input to the operator. - - pindoc: if the imaginary part field is empty and this pin is true, then the imaginary part is supposed to be 0 (default is false) + If the imaginary part field is empty and this + pin is true, then the imaginary part + is supposed to be 0 (default is + false) Parameters ---------- - my_imaginary_part_null : bool, + my_imaginary_part_null : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.sweeping_phase() >>> op.inputs.imaginary_part_null.connect(my_imaginary_part_null) - >>> #or + >>> # or >>> op.inputs.imaginary_part_null(my_imaginary_part_null) - """ return self._imaginary_part_null + class OutputsSweepingPhase(_Outputs): - """Intermediate class used to get outputs from sweeping_phase operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.sweeping_phase() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + sweeping_phase operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.sweeping_phase() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(sweeping_phase._spec().outputs, op) - self._field = Output(sweeping_phase._spec().output_pin(0), 0, op) + self._field = Output(sweeping_phase._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.sweeping_phase() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/math/sweeping_phase_fc.py b/ansys/dpf/core/operators/math/sweeping_phase_fc.py index f1fff2ce82e..2f6ae55e3a8 100644 --- a/ansys/dpf/core/operators/math/sweeping_phase_fc.py +++ b/ansys/dpf/core/operators/math/sweeping_phase_fc.py @@ -1,78 +1,137 @@ """ sweeping_phase_fc -================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class sweeping_phase_fc(Operator): - """Shift the phase of all the corresponding real and imaginary fields of a fields container for a given angle (in 2) of unit (in 4). 
- - available inputs: - - fields_container (FieldsContainer) - - angle (float) - - unit_name (str) - - abs_value (bool) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.sweeping_phase_fc() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_angle = float() - >>> op.inputs.angle.connect(my_angle) - >>> my_unit_name = str() - >>> op.inputs.unit_name.connect(my_unit_name) - >>> my_abs_value = bool() - >>> op.inputs.abs_value.connect(my_abs_value) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.sweeping_phase_fc(fields_container=my_fields_container,angle=my_angle,unit_name=my_unit_name,abs_value=my_abs_value) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, angle=None, unit_name=None, abs_value=None, config=None, server=None): - super().__init__(name="sweeping_phase_fc", config = config, server = server) + """Shift the phase of all the corresponding real and imaginary fields of + a fields container for a given angle (in 2) of unit (in 4). 
+ + Parameters + ---------- + fields_container : FieldsContainer + angle : float + unit_name : str + String unit + abs_value : bool + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.sweeping_phase_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_angle = float() + >>> op.inputs.angle.connect(my_angle) + >>> my_unit_name = str() + >>> op.inputs.unit_name.connect(my_unit_name) + >>> my_abs_value = bool() + >>> op.inputs.abs_value.connect(my_abs_value) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.sweeping_phase_fc( + ... fields_container=my_fields_container, + ... angle=my_angle, + ... unit_name=my_unit_name, + ... abs_value=my_abs_value, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + fields_container=None, + angle=None, + unit_name=None, + abs_value=None, + config=None, + server=None, + ): + super().__init__(name="sweeping_phase_fc", config=config, server=server) self._inputs = InputsSweepingPhaseFc(self) self._outputs = OutputsSweepingPhaseFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if angle !=None: + if angle is not None: self.inputs.angle.connect(angle) - if unit_name !=None: + if unit_name is not None: self.inputs.unit_name.connect(unit_name) - if abs_value !=None: + if abs_value is not None: self.inputs.abs_value.connect(abs_value) @staticmethod def _spec(): - spec = Specification(description="""Shift the phase of all the corresponding real and imaginary fields of a fields container for a given angle (in 2) of unit (in 4).""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 
2 : PinSpecification(name = "angle", type_names=["double"], optional=False, document=""""""), - 3 : PinSpecification(name = "unit_name", type_names=["string"], optional=False, document="""String Unit"""), - 4 : PinSpecification(name = "abs_value", type_names=["bool"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Shift the phase of all the corresponding real and imaginary fields of + a fields container for a given angle (in 2) of unit (in + 4).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="angle", + type_names=["double"], + optional=False, + document="""""", + ), + 3: PinSpecification( + name="unit_name", + type_names=["string"], + optional=False, + document="""String unit""", + ), + 4: PinSpecification( + name="abs_value", + type_names=["bool"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "sweeping_phase_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="sweeping_phase_fc", server=server) @property def inputs(self): @@ -80,165 +139,157 @@ def inputs(self): Returns -------- - inputs : InputsSweepingPhaseFc + inputs : InputsSweepingPhaseFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsSweepingPhaseFc + outputs : OutputsSweepingPhaseFc """ return super().outputs -#internal name: sweeping_phase_fc -#scripting name: sweeping_phase_fc class InputsSweepingPhaseFc(_Inputs): - """Intermediate class used to connect user inputs to sweeping_phase_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.sweeping_phase_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_angle = float() - >>> op.inputs.angle.connect(my_angle) - >>> my_unit_name = str() - >>> op.inputs.unit_name.connect(my_unit_name) - >>> my_abs_value = bool() - >>> op.inputs.abs_value.connect(my_abs_value) + """Intermediate class used to connect user inputs to + sweeping_phase_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.sweeping_phase_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_angle = float() + >>> op.inputs.angle.connect(my_angle) + >>> my_unit_name = str() + >>> op.inputs.unit_name.connect(my_unit_name) + >>> my_abs_value = bool() + >>> op.inputs.abs_value.connect(my_abs_value) """ + def __init__(self, op: Operator): super().__init__(sweeping_phase_fc._spec().inputs, op) - self._fields_container = Input(sweeping_phase_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + sweeping_phase_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._angle = Input(sweeping_phase_fc._spec().input_pin(2), 2, op, -1) + self._angle = Input(sweeping_phase_fc._spec().input_pin(2), 2, op, -1) self._inputs.append(self._angle) - self._unit_name = Input(sweeping_phase_fc._spec().input_pin(3), 3, op, -1) + self._unit_name = Input(sweeping_phase_fc._spec().input_pin(3), 3, op, -1) self._inputs.append(self._unit_name) - self._abs_value = Input(sweeping_phase_fc._spec().input_pin(4), 4, op, -1) + self._abs_value = Input(sweeping_phase_fc._spec().input_pin(4), 4, op, -1) self._inputs.append(self._abs_value) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.sweeping_phase_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def angle(self): - """Allows to connect angle input to the operator + """Allows to connect angle input to the operator. 
Parameters ---------- - my_angle : float, + my_angle : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.sweeping_phase_fc() >>> op.inputs.angle.connect(my_angle) - >>> #or + >>> # or >>> op.inputs.angle(my_angle) - """ return self._angle @property def unit_name(self): - """Allows to connect unit_name input to the operator + """Allows to connect unit_name input to the operator. - - pindoc: String Unit + String unit Parameters ---------- - my_unit_name : str, + my_unit_name : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.sweeping_phase_fc() >>> op.inputs.unit_name.connect(my_unit_name) - >>> #or + >>> # or >>> op.inputs.unit_name(my_unit_name) - """ return self._unit_name @property def abs_value(self): - """Allows to connect abs_value input to the operator + """Allows to connect abs_value input to the operator. Parameters ---------- - my_abs_value : bool, + my_abs_value : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.sweeping_phase_fc() >>> op.inputs.abs_value.connect(my_abs_value) - >>> #or + >>> # or >>> op.inputs.abs_value(my_abs_value) - """ return self._abs_value + class OutputsSweepingPhaseFc(_Outputs): - """Intermediate class used to get outputs from sweeping_phase_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.sweeping_phase_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + sweeping_phase_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.sweeping_phase_fc() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(sweeping_phase_fc._spec().outputs, op) - self._fields_container = Output(sweeping_phase_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(sweeping_phase_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.sweeping_phase_fc() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/math/unit_convert.py b/ansys/dpf/core/operators/math/unit_convert.py index b3a551b8abe..9e1728fd130 100644 --- a/ansys/dpf/core/operators/math/unit_convert.py +++ b/ansys/dpf/core/operators/math/unit_convert.py @@ -1,66 +1,120 @@ """ unit_convert -============ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.outputs import _modify_output_spec_with_one_type from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class unit_convert(Operator): - """Convert an input field/fields container or mesh of a given unit to another unit. 
- - available inputs: - - entity_to_convert (Field, FieldsContainer, MeshedRegion, MeshesContainer) - - unit_name (str) - - available outputs: - - converted_entity (Field ,FieldsContainer ,MeshedRegion ,MeshesContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.math.unit_convert() - - >>> # Make input connections - >>> my_entity_to_convert = dpf.Field() - >>> op.inputs.entity_to_convert.connect(my_entity_to_convert) - >>> my_unit_name = str() - >>> op.inputs.unit_name.connect(my_unit_name) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.unit_convert(entity_to_convert=my_entity_to_convert,unit_name=my_unit_name) + """Convert an input field/fields container or mesh of a given unit to + another unit. + + Parameters + ---------- + entity_to_convert : Field or FieldsContainer or MeshedRegion or MeshesContainer + unit_name : str + Unit as a string, ex 'm' for meter, 'pa' for + pascal,... + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.unit_convert() + + >>> # Make input connections + >>> my_entity_to_convert = dpf.Field() + >>> op.inputs.entity_to_convert.connect(my_entity_to_convert) + >>> my_unit_name = str() + >>> op.inputs.unit_name.connect(my_unit_name) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.unit_convert( + ... entity_to_convert=my_entity_to_convert, + ... unit_name=my_unit_name, + ... 
) + + >>> # Get output data + >>> result_converted_entity = op.outputs.converted_entity() + """ - >>> # Get output data - >>> result_converted_entity = op.outputs.converted_entity()""" - def __init__(self, entity_to_convert=None, unit_name=None, config=None, server=None): - super().__init__(name="unit_convert", config = config, server = server) + def __init__( + self, entity_to_convert=None, unit_name=None, config=None, server=None + ): + super().__init__(name="unit_convert", config=config, server=server) self._inputs = InputsUnitConvert(self) self._outputs = OutputsUnitConvert(self) - if entity_to_convert !=None: + if entity_to_convert is not None: self.inputs.entity_to_convert.connect(entity_to_convert) - if unit_name !=None: + if unit_name is not None: self.inputs.unit_name.connect(unit_name) @staticmethod def _spec(): - spec = Specification(description="""Convert an input field/fields container or mesh of a given unit to another unit.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "entity_to_convert", type_names=["field","fields_container","abstract_meshed_region","meshes_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "unit_name", type_names=["string"], optional=False, document="""unit as a string, ex 'm' for meter, 'Pa' for pascal,...""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "converted_entity", type_names=["field","fields_container","abstract_meshed_region","meshes_container"], optional=False, document="""the output entity is the same as the input (inplace operator)""")}) + description = """Convert an input field/fields container or mesh of a given unit to + another unit.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="entity_to_convert", + type_names=[ + "field", + "fields_container", + "abstract_meshed_region", + "meshes_container", + ], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="unit_name", + 
type_names=["string"], + optional=False, + document="""Unit as a string, ex 'm' for meter, 'pa' for + pascal,...""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="converted_entity", + type_names=[ + "field", + "fields_container", + "abstract_meshed_region", + "meshes_container", + ], + optional=False, + document="""The output entity is the same as the input + (inplace operator)""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "unit_convert") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="unit_convert", server=server) @property def inputs(self): @@ -68,104 +122,125 @@ def inputs(self): Returns -------- - inputs : InputsUnitConvert + inputs : InputsUnitConvert """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsUnitConvert + outputs : OutputsUnitConvert """ return super().outputs -#internal name: unit_convert -#scripting name: unit_convert class InputsUnitConvert(_Inputs): - """Intermediate class used to connect user inputs to unit_convert operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.unit_convert() - >>> my_entity_to_convert = dpf.Field() - >>> op.inputs.entity_to_convert.connect(my_entity_to_convert) - >>> my_unit_name = str() - >>> op.inputs.unit_name.connect(my_unit_name) + """Intermediate class used to connect user inputs to + unit_convert operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.unit_convert() + >>> my_entity_to_convert = dpf.Field() + >>> op.inputs.entity_to_convert.connect(my_entity_to_convert) + >>> my_unit_name = str() + >>> op.inputs.unit_name.connect(my_unit_name) """ + def __init__(self, op: Operator): super().__init__(unit_convert._spec().inputs, op) - self._entity_to_convert = Input(unit_convert._spec().input_pin(0), 0, op, -1) + self._entity_to_convert = Input(unit_convert._spec().input_pin(0), 0, op, -1) self._inputs.append(self._entity_to_convert) - self._unit_name = Input(unit_convert._spec().input_pin(1), 1, op, -1) + self._unit_name = Input(unit_convert._spec().input_pin(1), 1, op, -1) self._inputs.append(self._unit_name) @property def entity_to_convert(self): - """Allows to connect entity_to_convert input to the operator + """Allows to connect entity_to_convert input to the operator. Parameters ---------- - my_entity_to_convert : Field, FieldsContainer, MeshedRegion, MeshesContainer, + my_entity_to_convert : Field or FieldsContainer or MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.unit_convert() >>> op.inputs.entity_to_convert.connect(my_entity_to_convert) - >>> #or + >>> # or >>> op.inputs.entity_to_convert(my_entity_to_convert) - """ return self._entity_to_convert @property def unit_name(self): - """Allows to connect unit_name input to the operator + """Allows to connect unit_name input to the operator. - - pindoc: unit as a string, ex 'm' for meter, 'Pa' for pascal,... + Unit as a string, ex 'm' for meter, 'pa' for + pascal,... 
Parameters ---------- - my_unit_name : str, + my_unit_name : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.unit_convert() >>> op.inputs.unit_name.connect(my_unit_name) - >>> #or + >>> # or >>> op.inputs.unit_name(my_unit_name) - """ return self._unit_name + class OutputsUnitConvert(_Outputs): - """Intermediate class used to get outputs from unit_convert operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.unit_convert() - >>> # Connect inputs : op.inputs. ... - >>> result_converted_entity = op.outputs.converted_entity() + """Intermediate class used to get outputs from + unit_convert operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.unit_convert() + >>> # Connect inputs : op.inputs. ... + >>> result_converted_entity = op.outputs.converted_entity() """ + def __init__(self, op: Operator): super().__init__(unit_convert._spec().outputs, op) - self.converted_entity_as_field = Output( _modify_output_spec_with_one_type(unit_convert._spec().output_pin(0), "field"), 0, op) + self.converted_entity_as_field = Output( + _modify_output_spec_with_one_type( + unit_convert._spec().output_pin(0), "field" + ), + 0, + op, + ) self._outputs.append(self.converted_entity_as_field) - self.converted_entity_as_fields_container = Output( _modify_output_spec_with_one_type(unit_convert._spec().output_pin(0), "fields_container"), 0, op) + self.converted_entity_as_fields_container = Output( + _modify_output_spec_with_one_type( + unit_convert._spec().output_pin(0), "fields_container" + ), + 0, + op, + ) self._outputs.append(self.converted_entity_as_fields_container) - self.converted_entity_as_meshed_region = Output( _modify_output_spec_with_one_type(unit_convert._spec().output_pin(0), "abstract_meshed_region"), 0, op) + self.converted_entity_as_meshed_region = Output( + _modify_output_spec_with_one_type( + unit_convert._spec().output_pin(0), 
"meshed_region" + ), + 0, + op, + ) self._outputs.append(self.converted_entity_as_meshed_region) - self.converted_entity_as_meshes_container = Output( _modify_output_spec_with_one_type(unit_convert._spec().output_pin(0), "meshes_container"), 0, op) + self.converted_entity_as_meshes_container = Output( + _modify_output_spec_with_one_type( + unit_convert._spec().output_pin(0), "meshes_container" + ), + 0, + op, + ) self._outputs.append(self.converted_entity_as_meshes_container) - diff --git a/ansys/dpf/core/operators/math/unit_convert_fc.py b/ansys/dpf/core/operators/math/unit_convert_fc.py index d161136b2b8..22124c8336f 100644 --- a/ansys/dpf/core/operators/math/unit_convert_fc.py +++ b/ansys/dpf/core/operators/math/unit_convert_fc.py @@ -1,66 +1,106 @@ """ unit_convert_fc =============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "math" category -""" class unit_convert_fc(Operator): """Convert an input fields container of a given unit to another unit. - available inputs: - - fields_container (FieldsContainer) - - unit_name (str) + Parameters + ---------- + fields_container : FieldsContainer + unit_name : str + Unit as a string, ex 'm' for meter, 'pa' for + pascal,... 
+ - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.math.unit_convert_fc() - >>> # Instantiate operator - >>> op = dpf.operators.math.unit_convert_fc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_unit_name = str() + >>> op.inputs.unit_name.connect(my_unit_name) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_unit_name = str() - >>> op.inputs.unit_name.connect(my_unit_name) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.unit_convert_fc( + ... fields_container=my_fields_container, + ... unit_name=my_unit_name, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.math.unit_convert_fc(fields_container=my_fields_container,unit_name=my_unit_name) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, unit_name=None, config=None, server=None): - super().__init__(name="unit_convert_fc", config = config, server = server) + super().__init__(name="unit_convert_fc", config=config, server=server) self._inputs = InputsUnitConvertFc(self) self._outputs = OutputsUnitConvertFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if unit_name !=None: + if unit_name is not None: self.inputs.unit_name.connect(unit_name) @staticmethod def _spec(): - spec = Specification(description="""Convert an input fields container of a given unit to another unit.""", - 
map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "unit_name", type_names=["string"], optional=False, document="""unit as a string, ex 'm' for meter, 'Pa' for pascal,...""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = ( + """Convert an input fields container of a given unit to another unit.""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="unit_name", + type_names=["string"], + optional=False, + document="""Unit as a string, ex 'm' for meter, 'pa' for + pascal,...""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "unit_convert_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="unit_convert_fc", server=server) @property def inputs(self): @@ -68,117 +108,112 @@ def inputs(self): Returns -------- - inputs : InputsUnitConvertFc + inputs : InputsUnitConvertFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsUnitConvertFc + outputs : OutputsUnitConvertFc """ return super().outputs -#internal name: unit_convert_fc -#scripting name: unit_convert_fc class InputsUnitConvertFc(_Inputs): - """Intermediate class used to connect user inputs to unit_convert_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.unit_convert_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_unit_name = str() - >>> op.inputs.unit_name.connect(my_unit_name) + """Intermediate class used to connect user inputs to + unit_convert_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.unit_convert_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_unit_name = str() + >>> op.inputs.unit_name.connect(my_unit_name) """ + def __init__(self, op: Operator): super().__init__(unit_convert_fc._spec().inputs, op) - self._fields_container = Input(unit_convert_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(unit_convert_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) - self._unit_name = Input(unit_convert_fc._spec().input_pin(1), 1, op, -1) + self._unit_name = Input(unit_convert_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._unit_name) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.unit_convert_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def unit_name(self): - """Allows to connect unit_name input to the operator + """Allows to connect unit_name input to the operator. - - pindoc: unit as a string, ex 'm' for meter, 'Pa' for pascal,... + Unit as a string, ex 'm' for meter, 'pa' for + pascal,... Parameters ---------- - my_unit_name : str, + my_unit_name : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.unit_convert_fc() >>> op.inputs.unit_name.connect(my_unit_name) - >>> #or + >>> # or >>> op.inputs.unit_name(my_unit_name) - """ return self._unit_name + class OutputsUnitConvertFc(_Outputs): - """Intermediate class used to get outputs from unit_convert_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.math.unit_convert_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + unit_convert_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.unit_convert_fc() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(unit_convert_fc._spec().outputs, op) - self._fields_container = Output(unit_convert_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(unit_convert_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.math.unit_convert_fc() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/mesh/__init__.py b/ansys/dpf/core/operators/mesh/__init__.py index 0dacf3f7122..bcc136ea4ec 100644 --- a/ansys/dpf/core/operators/mesh/__init__.py +++ b/ansys/dpf/core/operators/mesh/__init__.py @@ -1,12 +1,15 @@ from .node_coordinates import node_coordinates from .from_field import from_field from .mesh_provider import mesh_provider -from .points_from_coordinates import points_from_coordinates +from .beam_properties import beam_properties from .split_mesh import split_mesh from .from_scoping import from_scoping from .split_fields import split_fields +from .points_from_coordinates import points_from_coordinates from .tri_mesh_skin import tri_mesh_skin from .mesh_cut import mesh_cut from .external_layer import external_layer from .skin import skin +from .mesh_to_graphics import mesh_to_graphics +from .mesh_to_graphics_edges import mesh_to_graphics_edges from .stl_export import stl_export diff --git a/ansys/dpf/core/operators/mesh/acmo_mesh_provider.py b/ansys/dpf/core/operators/mesh/acmo_mesh_provider.py new file mode 100644 index 00000000000..a491076ba48 --- 
/dev/null +++ b/ansys/dpf/core/operators/mesh/acmo_mesh_provider.py @@ -0,0 +1,271 @@ +"""Autogenerated DPF operator classes. + +Created on 12/06/2021, 14:29:17. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class acmo_mesh_provider(Operator): + """Read a mesh from result files and cure degenerated elements + + Parameters + ---------- + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + read_cyclic : int, optional + If 1 cyclic symmetry is ignored, if 2 cyclic + expansion is done (default is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.mesh.acmo_mesh_provider() + + >>> # Make input connections + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.mesh.acmo_mesh_provider( + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_mesh = op.outputs.mesh() + """ + + def __init__( + self, + streams_container=None, + data_sources=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="acmo::acmo::MeshProvider", config=config, server=server) + self._inputs = InputsAcmoMeshProvider(self) + self._outputs = OutputsAcmoMeshProvider(self) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + + @staticmethod + def _spec(): + description = """Read a mesh from result files and cure degenerated elements""" + spec = Specification( + description=description, + map_input_pin_spec={ + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 1 cyclic symmetry is ignored, if 2 cyclic + expansion is done (default is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="acmo::acmo::MeshProvider", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsAcmoMeshProvider + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsAcmoMeshProvider + """ + return super().outputs + + +class InputsAcmoMeshProvider(_Inputs): + """Intermediate class used to connect user inputs to + acmo_mesh_provider operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.acmo_mesh_provider() + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """ + + def __init__(self, op: Operator): + super().__init__(acmo_mesh_provider._spec().inputs, op) + self._streams_container = Input( + acmo_mesh_provider._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input(acmo_mesh_provider._spec().input_pin(4), 4, op, -1) + self._inputs.append(self._data_sources) + self._read_cyclic = Input(acmo_mesh_provider._spec().input_pin(14), 14, op, -1) + self._inputs.append(self._read_cyclic) + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. 
+ + Result file container allowed to be kept open + to cache data + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.acmo_mesh_provider() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Result file path container, used if no + streams are set + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.acmo_mesh_provider() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def read_cyclic(self): + """Allows to connect read_cyclic input to the operator. + + If 1 cyclic symmetry is ignored, if 2 cyclic + expansion is done (default is 1) + + Parameters + ---------- + my_read_cyclic : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.acmo_mesh_provider() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> # or + >>> op.inputs.read_cyclic(my_read_cyclic) + """ + return self._read_cyclic + + +class OutputsAcmoMeshProvider(_Outputs): + """Intermediate class used to get outputs from + acmo_mesh_provider operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.acmo_mesh_provider() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_mesh = op.outputs.mesh() + """ + + def __init__(self, op: Operator): + super().__init__(acmo_mesh_provider._spec().outputs, op) + self._mesh = Output(acmo_mesh_provider._spec().output_pin(0), 0, op) + self._outputs.append(self._mesh) + + @property + def mesh(self): + """Allows to get mesh output of the operator + + Returns + ---------- + my_mesh : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.acmo_mesh_provider() + >>> # Connect inputs : op.inputs. ... + >>> result_mesh = op.outputs.mesh() + """ # noqa: E501 + return self._mesh diff --git a/ansys/dpf/core/operators/mesh/beam_properties.py b/ansys/dpf/core/operators/mesh/beam_properties.py new file mode 100644 index 00000000000..0c3e76b0446 --- /dev/null +++ b/ansys/dpf/core/operators/mesh/beam_properties.py @@ -0,0 +1,516 @@ +""" +beam_properties +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class beam_properties(Operator): + """Read beam's properties from the result files contained in the streams + or data sources. + + Parameters + ---------- + streams : StreamsContainer, optional + result file container allowed to be kept + open to cache data. + data_sources : DataSources + Result file path container, used if no + streams are set. 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.mesh.beam_properties() + + >>> # Make input connections + >>> my_streams = dpf.StreamsContainer() + >>> op.inputs.streams.connect(my_streams) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.mesh.beam_properties( + ... streams=my_streams, + ... data_sources=my_data_sources, + ... ) + + >>> # Get output data + >>> result_mesh_out = op.outputs.mesh_out() + >>> result_field_type_section_id = op.outputs.field_type_section_id() + >>> result_field_area = op.outputs.field_area() + >>> result_field_moment_inertia = op.outputs.field_moment_inertia() + >>> result_field_geometry = op.outputs.field_geometry() + >>> result_field_young_modulus = op.outputs.field_young_modulus() + >>> result_field_poisson_ratio = op.outputs.field_poisson_ratio() + >>> result_field_shear_modulus = op.outputs.field_shear_modulus() + >>> result_field_beam_length = op.outputs.field_beam_length() + >>> result_field_torsion_constant = op.outputs.field_torsion_constant() + >>> result_field_warping_constant = op.outputs.field_warping_constant() + """ + + def __init__(self, streams=None, data_sources=None, config=None, server=None): + super().__init__(name="beam_properties", config=config, server=server) + self._inputs = InputsBeamProperties(self) + self._outputs = OutputsBeamProperties(self) + if streams is not None: + self.inputs.streams.connect(streams) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + + @staticmethod + def _spec(): + description = """Read beam's properties from the result files contained in the streams + or data sources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 3: PinSpecification( + name="streams", + type_names=["streams_container"], + optional=True, + 
document=""" result file container allowed to be kept + open to cache data.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="mesh_out", + type_names=["abstract_meshed_region"], + optional=False, + document="""This mesh updates a new map containing field + of beam's properties if there is at + least 1 beam in mesh.""", + ), + 1: PinSpecification( + name="field_type_section_id", + type_names=["field"], + optional=False, + document="""This field contains section id of beams. + 1:rec; 3:csolid, 4:ctube, 5:chan, + 6:z, 7:l, 8:i, 9:t, 11:hats, 12:hrec.""", + ), + 2: PinSpecification( + name="field_area", + type_names=["field"], + optional=False, + document="""This field contains area of beams.""", + ), + 3: PinSpecification( + name="field_moment_inertia", + type_names=["field"], + optional=False, + document="""This field contains inertia moment of beams. + iyy, iyz, izz""", + ), + 4: PinSpecification( + name="field_geometry", + type_names=["field"], + optional=False, + document="""This field contains geometry of beams. + rec:b,h. csolid:ri. ctube:ri, re. + chan:w1,w2,w3,t1,t2,t3. + z:w1,w2,w3,t1,t2,t3. l:w1,w2,t1,t2. + i:w1,w2,w3,t1,t2,t3. t:w1,w2,t1,t2. + hats: w1,w2,w3,w4,t1,t2,t3,t4. 
+ hrec:w1,w2,t1,t2,t3,t4.""", + ), + 5: PinSpecification( + name="field_young_modulus", + type_names=["field"], + optional=False, + document="""This field contains young's modulus of beams.""", + ), + 6: PinSpecification( + name="field_poisson_ratio", + type_names=["field"], + optional=False, + document="""This field contains poisson's ratio of beams.""", + ), + 7: PinSpecification( + name="field_shear_modulus", + type_names=["field"], + optional=False, + document="""This field contains shear modulus of beams.""", + ), + 8: PinSpecification( + name="field_beam_length", + type_names=["field"], + optional=False, + document="""This field contains length of beams.""", + ), + 9: PinSpecification( + name="field_torsion_constant", + type_names=["field"], + optional=False, + document="""This field contains torsion constant of + beams.""", + ), + 10: PinSpecification( + name="field_warping_constant", + type_names=["field"], + optional=False, + document="""This field contains warping constant of + beams.""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="beam_properties", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsBeamProperties + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsBeamProperties + """ + return super().outputs + + +class InputsBeamProperties(_Inputs): + """Intermediate class used to connect user inputs to + beam_properties operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.beam_properties() + >>> my_streams = dpf.StreamsContainer() + >>> op.inputs.streams.connect(my_streams) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + """ + + def __init__(self, op: Operator): + super().__init__(beam_properties._spec().inputs, op) + self._streams = Input(beam_properties._spec().input_pin(3), 3, op, -1) + self._inputs.append(self._streams) + self._data_sources = Input(beam_properties._spec().input_pin(4), 4, op, -1) + self._inputs.append(self._data_sources) + + @property + def streams(self): + """Allows to connect streams input to the operator. + + result file container allowed to be kept + open to cache data. + + Parameters + ---------- + my_streams : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.beam_properties() + >>> op.inputs.streams.connect(my_streams) + >>> # or + >>> op.inputs.streams(my_streams) + """ + return self._streams + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Result file path container, used if no + streams are set. 
+ + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.beam_properties() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + +class OutputsBeamProperties(_Outputs): + """Intermediate class used to get outputs from + beam_properties operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.beam_properties() + >>> # Connect inputs : op.inputs. ... + >>> result_mesh_out = op.outputs.mesh_out() + >>> result_field_type_section_id = op.outputs.field_type_section_id() + >>> result_field_area = op.outputs.field_area() + >>> result_field_moment_inertia = op.outputs.field_moment_inertia() + >>> result_field_geometry = op.outputs.field_geometry() + >>> result_field_young_modulus = op.outputs.field_young_modulus() + >>> result_field_poisson_ratio = op.outputs.field_poisson_ratio() + >>> result_field_shear_modulus = op.outputs.field_shear_modulus() + >>> result_field_beam_length = op.outputs.field_beam_length() + >>> result_field_torsion_constant = op.outputs.field_torsion_constant() + >>> result_field_warping_constant = op.outputs.field_warping_constant() + """ + + def __init__(self, op: Operator): + super().__init__(beam_properties._spec().outputs, op) + self._mesh_out = Output(beam_properties._spec().output_pin(0), 0, op) + self._outputs.append(self._mesh_out) + self._field_type_section_id = Output( + beam_properties._spec().output_pin(1), 1, op + ) + self._outputs.append(self._field_type_section_id) + self._field_area = Output(beam_properties._spec().output_pin(2), 2, op) + self._outputs.append(self._field_area) + self._field_moment_inertia = Output( + beam_properties._spec().output_pin(3), 3, op + ) + self._outputs.append(self._field_moment_inertia) + self._field_geometry = Output(beam_properties._spec().output_pin(4), 4, op) + 
self._outputs.append(self._field_geometry) + self._field_young_modulus = Output(beam_properties._spec().output_pin(5), 5, op) + self._outputs.append(self._field_young_modulus) + self._field_poisson_ratio = Output(beam_properties._spec().output_pin(6), 6, op) + self._outputs.append(self._field_poisson_ratio) + self._field_shear_modulus = Output(beam_properties._spec().output_pin(7), 7, op) + self._outputs.append(self._field_shear_modulus) + self._field_beam_length = Output(beam_properties._spec().output_pin(8), 8, op) + self._outputs.append(self._field_beam_length) + self._field_torsion_constant = Output( + beam_properties._spec().output_pin(9), 9, op + ) + self._outputs.append(self._field_torsion_constant) + self._field_warping_constant = Output( + beam_properties._spec().output_pin(10), 10, op + ) + self._outputs.append(self._field_warping_constant) + + @property + def mesh_out(self): + """Allows to get mesh_out output of the operator + + Returns + ---------- + my_mesh_out : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.beam_properties() + >>> # Connect inputs : op.inputs. ... + >>> result_mesh_out = op.outputs.mesh_out() + """ # noqa: E501 + return self._mesh_out + + @property + def field_type_section_id(self): + """Allows to get field_type_section_id output of the operator + + Returns + ---------- + my_field_type_section_id : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.beam_properties() + >>> # Connect inputs : op.inputs. ... + >>> result_field_type_section_id = op.outputs.field_type_section_id() + """ # noqa: E501 + return self._field_type_section_id + + @property + def field_area(self): + """Allows to get field_area output of the operator + + Returns + ---------- + my_field_area : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.beam_properties() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field_area = op.outputs.field_area() + """ # noqa: E501 + return self._field_area + + @property + def field_moment_inertia(self): + """Allows to get field_moment_inertia output of the operator + + Returns + ---------- + my_field_moment_inertia : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.beam_properties() + >>> # Connect inputs : op.inputs. ... + >>> result_field_moment_inertia = op.outputs.field_moment_inertia() + """ # noqa: E501 + return self._field_moment_inertia + + @property + def field_geometry(self): + """Allows to get field_geometry output of the operator + + Returns + ---------- + my_field_geometry : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.beam_properties() + >>> # Connect inputs : op.inputs. ... + >>> result_field_geometry = op.outputs.field_geometry() + """ # noqa: E501 + return self._field_geometry + + @property + def field_young_modulus(self): + """Allows to get field_young_modulus output of the operator + + Returns + ---------- + my_field_young_modulus : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.beam_properties() + >>> # Connect inputs : op.inputs. ... + >>> result_field_young_modulus = op.outputs.field_young_modulus() + """ # noqa: E501 + return self._field_young_modulus + + @property + def field_poisson_ratio(self): + """Allows to get field_poisson_ratio output of the operator + + Returns + ---------- + my_field_poisson_ratio : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.beam_properties() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field_poisson_ratio = op.outputs.field_poisson_ratio() + """ # noqa: E501 + return self._field_poisson_ratio + + @property + def field_shear_modulus(self): + """Allows to get field_shear_modulus output of the operator + + Returns + ---------- + my_field_shear_modulus : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.beam_properties() + >>> # Connect inputs : op.inputs. ... + >>> result_field_shear_modulus = op.outputs.field_shear_modulus() + """ # noqa: E501 + return self._field_shear_modulus + + @property + def field_beam_length(self): + """Allows to get field_beam_length output of the operator + + Returns + ---------- + my_field_beam_length : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.beam_properties() + >>> # Connect inputs : op.inputs. ... + >>> result_field_beam_length = op.outputs.field_beam_length() + """ # noqa: E501 + return self._field_beam_length + + @property + def field_torsion_constant(self): + """Allows to get field_torsion_constant output of the operator + + Returns + ---------- + my_field_torsion_constant : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.beam_properties() + >>> # Connect inputs : op.inputs. ... + >>> result_field_torsion_constant = op.outputs.field_torsion_constant() + """ # noqa: E501 + return self._field_torsion_constant + + @property + def field_warping_constant(self): + """Allows to get field_warping_constant output of the operator + + Returns + ---------- + my_field_warping_constant : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.beam_properties() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field_warping_constant = op.outputs.field_warping_constant() + """ # noqa: E501 + return self._field_warping_constant diff --git a/ansys/dpf/core/operators/mesh/external_layer.py b/ansys/dpf/core/operators/mesh/external_layer.py index 1d0efdd290c..ea7a4937f37 100644 --- a/ansys/dpf/core/operators/mesh/external_layer.py +++ b/ansys/dpf/core/operators/mesh/external_layer.py @@ -1,66 +1,109 @@ """ external_layer -============== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from meshOperatorsCore plugin, from "mesh" category -""" class external_layer(Operator): - """Extracts the external layer (thick skin) of the mesh (3D elements) in a new meshed region + """Extracts the external layer (thick skin) of the mesh (3D elements) in + a new meshed region + + Parameters + ---------- + mesh : MeshedRegion - available inputs: - - mesh (MeshedRegion) - available outputs: - - mesh (MeshedRegion) - - nodes_mesh_scoping (Scoping) - - elements_mesh_scoping (Scoping) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.mesh.external_layer() - >>> # Instantiate operator - >>> op = dpf.operators.mesh.external_layer() + >>> # Make input connections + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) - >>> # Make input connections - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.mesh.external_layer( + ... mesh=my_mesh, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.mesh.external_layer(mesh=my_mesh) + >>> # Get output data + >>> result_mesh = op.outputs.mesh() + >>> result_nodes_mesh_scoping = op.outputs.nodes_mesh_scoping() + >>> result_elements_mesh_scoping = op.outputs.elements_mesh_scoping() + """ - >>> # Get output data - >>> result_mesh = op.outputs.mesh() - >>> result_nodes_mesh_scoping = op.outputs.nodes_mesh_scoping() - >>> result_elements_mesh_scoping = op.outputs.elements_mesh_scoping()""" def __init__(self, mesh=None, config=None, server=None): - super().__init__(name="meshed_external_layer_sector", config = config, server = server) + super().__init__( + name="meshed_external_layer_sector", config=config, server=server + ) self._inputs = InputsExternalLayer(self) self._outputs = OutputsExternalLayer(self) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Extracts the external layer (thick skin) of the mesh (3D elements) in a new meshed region""", - map_input_pin_spec={ - 0 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=False, document=""""""), - 1 : PinSpecification(name = "nodes_mesh_scoping", type_names=["scoping"], optional=False, document=""""""), - 2 : PinSpecification(name = "elements_mesh_scoping", type_names=["scoping"], optional=False, document="""""")}) + description = """Extracts the external layer (thick skin) of the mesh (3D elements) in + a new meshed region""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + 
optional=False, + document="""""", + ), + 1: PinSpecification( + name="nodes_mesh_scoping", + type_names=["scoping"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="elements_mesh_scoping", + type_names=["scoping"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "meshed_external_layer_sector") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config( + name="meshed_external_layer_sector", server=server + ) @property def inputs(self): @@ -68,135 +111,129 @@ def inputs(self): Returns -------- - inputs : InputsExternalLayer + inputs : InputsExternalLayer """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsExternalLayer + outputs : OutputsExternalLayer """ return super().outputs -#internal name: meshed_external_layer_sector -#scripting name: external_layer class InputsExternalLayer(_Inputs): - """Intermediate class used to connect user inputs to external_layer operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mesh.external_layer() - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + external_layer operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.external_layer() + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(external_layer._spec().inputs, op) - self._mesh = Input(external_layer._spec().input_pin(0), 0, op, -1) + self._mesh = Input(external_layer._spec().input_pin(0), 0, op, -1) self._inputs.append(self._mesh) @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.external_layer() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsExternalLayer(_Outputs): - """Intermediate class used to get outputs from external_layer operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mesh.external_layer() - >>> # Connect inputs : op.inputs. ... - >>> result_mesh = op.outputs.mesh() - >>> result_nodes_mesh_scoping = op.outputs.nodes_mesh_scoping() - >>> result_elements_mesh_scoping = op.outputs.elements_mesh_scoping() + """Intermediate class used to get outputs from + external_layer operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.external_layer() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_mesh = op.outputs.mesh() + >>> result_nodes_mesh_scoping = op.outputs.nodes_mesh_scoping() + >>> result_elements_mesh_scoping = op.outputs.elements_mesh_scoping() """ + def __init__(self, op: Operator): super().__init__(external_layer._spec().outputs, op) - self._mesh = Output(external_layer._spec().output_pin(0), 0, op) + self._mesh = Output(external_layer._spec().output_pin(0), 0, op) self._outputs.append(self._mesh) - self._nodes_mesh_scoping = Output(external_layer._spec().output_pin(1), 1, op) + self._nodes_mesh_scoping = Output(external_layer._spec().output_pin(1), 1, op) self._outputs.append(self._nodes_mesh_scoping) - self._elements_mesh_scoping = Output(external_layer._spec().output_pin(2), 2, op) + self._elements_mesh_scoping = Output( + external_layer._spec().output_pin(2), 2, op + ) self._outputs.append(self._elements_mesh_scoping) @property def mesh(self): """Allows to get mesh output of the operator - Returns ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.external_layer() >>> # Connect inputs : op.inputs. ... - >>> result_mesh = op.outputs.mesh() - """ + >>> result_mesh = op.outputs.mesh() + """ # noqa: E501 return self._mesh @property def nodes_mesh_scoping(self): """Allows to get nodes_mesh_scoping output of the operator - Returns ---------- - my_nodes_mesh_scoping : Scoping, + my_nodes_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.external_layer() >>> # Connect inputs : op.inputs. ... 
- >>> result_nodes_mesh_scoping = op.outputs.nodes_mesh_scoping() - """ + >>> result_nodes_mesh_scoping = op.outputs.nodes_mesh_scoping() + """ # noqa: E501 return self._nodes_mesh_scoping @property def elements_mesh_scoping(self): """Allows to get elements_mesh_scoping output of the operator - Returns ---------- - my_elements_mesh_scoping : Scoping, + my_elements_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.external_layer() >>> # Connect inputs : op.inputs. ... - >>> result_elements_mesh_scoping = op.outputs.elements_mesh_scoping() - """ + >>> result_elements_mesh_scoping = op.outputs.elements_mesh_scoping() + """ # noqa: E501 return self._elements_mesh_scoping - diff --git a/ansys/dpf/core/operators/mesh/from_field.py b/ansys/dpf/core/operators/mesh/from_field.py index 4da3d095ddf..f83cae7fe30 100644 --- a/ansys/dpf/core/operators/mesh/from_field.py +++ b/ansys/dpf/core/operators/mesh/from_field.py @@ -1,60 +1,91 @@ """ from_field -========== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "mesh" category -""" class from_field(Operator): """Returns the meshed region contained in the support of the mesh. 
- available inputs: - - field (Field) + Parameters + ---------- + field : Field + - available outputs: - - mesh (MeshedRegion) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.mesh.from_field() - >>> # Instantiate operator - >>> op = dpf.operators.mesh.from_field() + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.mesh.from_field( + ... field=my_field, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.mesh.from_field(field=my_field) + >>> # Get output data + >>> result_mesh = op.outputs.mesh() + """ - >>> # Get output data - >>> result_mesh = op.outputs.mesh()""" def __init__(self, field=None, config=None, server=None): - super().__init__(name="GetSupportFromField", config = config, server = server) + super().__init__(name="GetSupportFromField", config=config, server=server) self._inputs = InputsFromField(self) self._outputs = OutputsFromField(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) @staticmethod def _spec(): - spec = Specification(description="""Returns the meshed region contained in the support of the mesh.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=False, document="""""")}) + description = ( + """Returns the meshed region contained in the support of the mesh.""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + 
document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "GetSupportFromField") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="GetSupportFromField", server=server) @property def inputs(self): @@ -62,91 +93,87 @@ def inputs(self): Returns -------- - inputs : InputsFromField + inputs : InputsFromField """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsFromField + outputs : OutputsFromField """ return super().outputs -#internal name: GetSupportFromField -#scripting name: from_field class InputsFromField(_Inputs): - """Intermediate class used to connect user inputs to from_field operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mesh.from_field() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + """Intermediate class used to connect user inputs to + from_field operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.from_field() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) """ + def __init__(self, op: Operator): super().__init__(from_field._spec().inputs, op) - self._field = Input(from_field._spec().input_pin(0), 0, op, -1) + self._field = Input(from_field._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. Parameters ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.from_field() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field + class OutputsFromField(_Outputs): - """Intermediate class used to get outputs from from_field operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mesh.from_field() - >>> # Connect inputs : op.inputs. ... - >>> result_mesh = op.outputs.mesh() + """Intermediate class used to get outputs from + from_field operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.from_field() + >>> # Connect inputs : op.inputs. ... + >>> result_mesh = op.outputs.mesh() """ + def __init__(self, op: Operator): super().__init__(from_field._spec().outputs, op) - self._mesh = Output(from_field._spec().output_pin(0), 0, op) + self._mesh = Output(from_field._spec().output_pin(0), 0, op) self._outputs.append(self._mesh) @property def mesh(self): """Allows to get mesh output of the operator - Returns ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.from_field() >>> # Connect inputs : op.inputs. ... 
- >>> result_mesh = op.outputs.mesh() - """ + >>> result_mesh = op.outputs.mesh() + """ # noqa: E501 return self._mesh - diff --git a/ansys/dpf/core/operators/mesh/from_scoping.py b/ansys/dpf/core/operators/mesh/from_scoping.py index 2544dee245f..b3fe15f9a40 100644 --- a/ansys/dpf/core/operators/mesh/from_scoping.py +++ b/ansys/dpf/core/operators/mesh/from_scoping.py @@ -1,72 +1,131 @@ """ from_scoping -============ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "mesh" category -""" class from_scoping(Operator): """Extracts a meshed region from an other meshed region base on a scoping - available inputs: - - scoping (Scoping) - - inclusive (int) (optional) - - mesh (MeshedRegion) - - available outputs: - - mesh (MeshedRegion) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.mesh.from_scoping() - - >>> # Make input connections - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - >>> my_inclusive = int() - >>> op.inputs.inclusive.connect(my_inclusive) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.mesh.from_scoping(scoping=my_scoping,inclusive=my_inclusive,mesh=my_mesh) + Parameters + ---------- + scoping : Scoping + If nodal scoping, then the scoping is + transposed respecting the inclusive + pin + inclusive : int, optional + If inclusive == 1 then all the elements + adjacent to the nodes ids in input + are added, if inclusive == 0, only + the elements which 
have all their + nodes in the scoping are included + mesh : MeshedRegion + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.mesh.from_scoping() + + >>> # Make input connections + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_inclusive = int() + >>> op.inputs.inclusive.connect(my_inclusive) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.mesh.from_scoping( + ... scoping=my_scoping, + ... inclusive=my_inclusive, + ... mesh=my_mesh, + ... ) + + >>> # Get output data + >>> result_mesh = op.outputs.mesh() + """ - >>> # Get output data - >>> result_mesh = op.outputs.mesh()""" - def __init__(self, scoping=None, inclusive=None, mesh=None, config=None, server=None): - super().__init__(name="mesh::by_scoping", config = config, server = server) + def __init__( + self, scoping=None, inclusive=None, mesh=None, config=None, server=None + ): + super().__init__(name="mesh::by_scoping", config=config, server=server) self._inputs = InputsFromScoping(self) self._outputs = OutputsFromScoping(self) - if scoping !=None: + if scoping is not None: self.inputs.scoping.connect(scoping) - if inclusive !=None: + if inclusive is not None: self.inputs.inclusive.connect(inclusive) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Extracts a meshed region from an other meshed region base on a scoping""", - map_input_pin_spec={ - 1 : PinSpecification(name = "scoping", type_names=["scoping"], optional=False, document="""if nodal scoping, then the scoping is transposed respecting the inclusive pin"""), - 2 : PinSpecification(name = "inclusive", type_names=["int32"], optional=True, document="""if inclusive == 1 then all the elements adjacent to the nodes ids in input are added, if inclusive == 0, 
only the elements which have all their nodes in the scoping are included"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=False, document="""""")}) + description = ( + """Extracts a meshed region from an other meshed region base on a scoping""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=False, + document="""If nodal scoping, then the scoping is + transposed respecting the inclusive + pin""", + ), + 2: PinSpecification( + name="inclusive", + type_names=["int32"], + optional=True, + document="""If inclusive == 1 then all the elements + adjacent to the nodes ids in input + are added, if inclusive == 0, only + the elements which have all their + nodes in the scoping are included""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mesh::by_scoping") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="mesh::by_scoping", server=server) @property def inputs(self): @@ -74,143 +133,141 @@ def inputs(self): Returns -------- - inputs : InputsFromScoping + inputs : InputsFromScoping """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsFromScoping + outputs : OutputsFromScoping """ return super().outputs -#internal name: mesh::by_scoping -#scripting name: from_scoping class InputsFromScoping(_Inputs): - """Intermediate class used to connect user inputs to from_scoping operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mesh.from_scoping() - >>> my_scoping = dpf.Scoping() - >>> op.inputs.scoping.connect(my_scoping) - >>> my_inclusive = int() - >>> op.inputs.inclusive.connect(my_inclusive) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + from_scoping operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.from_scoping() + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_inclusive = int() + >>> op.inputs.inclusive.connect(my_inclusive) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(from_scoping._spec().inputs, op) - self._scoping = Input(from_scoping._spec().input_pin(1), 1, op, -1) + self._scoping = Input(from_scoping._spec().input_pin(1), 1, op, -1) self._inputs.append(self._scoping) - self._inclusive = Input(from_scoping._spec().input_pin(2), 2, op, -1) + self._inclusive = Input(from_scoping._spec().input_pin(2), 2, op, -1) self._inputs.append(self._inclusive) - self._mesh = Input(from_scoping._spec().input_pin(7), 7, op, -1) + self._mesh = Input(from_scoping._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) @property def scoping(self): - """Allows to connect scoping input to the operator + """Allows to connect scoping input to the operator. - - pindoc: if nodal scoping, then the scoping is transposed respecting the inclusive pin + If nodal scoping, then the scoping is + transposed respecting the inclusive + pin Parameters ---------- - my_scoping : Scoping, + my_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.from_scoping() >>> op.inputs.scoping.connect(my_scoping) - >>> #or + >>> # or >>> op.inputs.scoping(my_scoping) - """ return self._scoping @property def inclusive(self): - """Allows to connect inclusive input to the operator + """Allows to connect inclusive input to the operator. 
- - pindoc: if inclusive == 1 then all the elements adjacent to the nodes ids in input are added, if inclusive == 0, only the elements which have all their nodes in the scoping are included + If inclusive == 1 then all the elements + adjacent to the nodes ids in input + are added, if inclusive == 0, only + the elements which have all their + nodes in the scoping are included Parameters ---------- - my_inclusive : int, + my_inclusive : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.from_scoping() >>> op.inputs.inclusive.connect(my_inclusive) - >>> #or + >>> # or >>> op.inputs.inclusive(my_inclusive) - """ return self._inclusive @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.from_scoping() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsFromScoping(_Outputs): - """Intermediate class used to get outputs from from_scoping operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mesh.from_scoping() - >>> # Connect inputs : op.inputs. ... - >>> result_mesh = op.outputs.mesh() + """Intermediate class used to get outputs from + from_scoping operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.from_scoping() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_mesh = op.outputs.mesh() """ + def __init__(self, op: Operator): super().__init__(from_scoping._spec().outputs, op) - self._mesh = Output(from_scoping._spec().output_pin(0), 0, op) + self._mesh = Output(from_scoping._spec().output_pin(0), 0, op) self._outputs.append(self._mesh) @property def mesh(self): """Allows to get mesh output of the operator - Returns ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.from_scoping() >>> # Connect inputs : op.inputs. ... - >>> result_mesh = op.outputs.mesh() - """ + >>> result_mesh = op.outputs.mesh() + """ # noqa: E501 return self._mesh - diff --git a/ansys/dpf/core/operators/mesh/mesh_cut.py b/ansys/dpf/core/operators/mesh/mesh_cut.py index 7c492b2f5f2..a7bed386099 100644 --- a/ansys/dpf/core/operators/mesh/mesh_cut.py +++ b/ansys/dpf/core/operators/mesh/mesh_cut.py @@ -1,78 +1,144 @@ """ mesh_cut -======== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from meshOperatorsCore plugin, from "mesh" category -""" class mesh_cut(Operator): - """Extracts a skin of the mesh in triangles (2D elements) in a new meshed region - - available inputs: - - field (Field) - - iso_value (float) - - closed_surface (float) - - slice_surfaces (bool) - - available outputs: - - mesh (MeshedRegion) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.mesh.mesh_cut() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_iso_value = float() - >>> op.inputs.iso_value.connect(my_iso_value) - >>> my_closed_surface = float() - >>> op.inputs.closed_surface.connect(my_closed_surface) - >>> my_slice_surfaces = bool() - >>> op.inputs.slice_surfaces.connect(my_slice_surfaces) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.mesh.mesh_cut(field=my_field,iso_value=my_iso_value,closed_surface=my_closed_surface,slice_surfaces=my_slice_surfaces) - - >>> # Get output data - >>> result_mesh = op.outputs.mesh()""" - def __init__(self, field=None, iso_value=None, closed_surface=None, slice_surfaces=None, config=None, server=None): - super().__init__(name="mesh_cut", config = config, server = server) + """Extracts a skin of the mesh in triangles (2D elements) in a new meshed + region + + Parameters + ---------- + field : Field + iso_value : float + Iso value + closed_surface : float + 1: closed surface, 0:iso surface + slice_surfaces : bool + True: slicing will also take into account + shell and 2d elements, false: slicing + will ignore 
shell and 2d elements. + default is true + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.mesh.mesh_cut() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_iso_value = float() + >>> op.inputs.iso_value.connect(my_iso_value) + >>> my_closed_surface = float() + >>> op.inputs.closed_surface.connect(my_closed_surface) + >>> my_slice_surfaces = bool() + >>> op.inputs.slice_surfaces.connect(my_slice_surfaces) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.mesh.mesh_cut( + ... field=my_field, + ... iso_value=my_iso_value, + ... closed_surface=my_closed_surface, + ... slice_surfaces=my_slice_surfaces, + ... ) + + >>> # Get output data + >>> result_mesh = op.outputs.mesh() + """ + + def __init__( + self, + field=None, + iso_value=None, + closed_surface=None, + slice_surfaces=None, + config=None, + server=None, + ): + super().__init__(name="mesh_cut", config=config, server=server) self._inputs = InputsMeshCut(self) self._outputs = OutputsMeshCut(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if iso_value !=None: + if iso_value is not None: self.inputs.iso_value.connect(iso_value) - if closed_surface !=None: + if closed_surface is not None: self.inputs.closed_surface.connect(closed_surface) - if slice_surfaces !=None: + if slice_surfaces is not None: self.inputs.slice_surfaces.connect(slice_surfaces) @staticmethod def _spec(): - spec = Specification(description="""Extracts a skin of the mesh in triangles (2D elements) in a new meshed region""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document=""""""), - 1 : PinSpecification(name = "iso_value", type_names=["double"], optional=False, document="""iso value"""), - 3 : PinSpecification(name = "closed_surface", type_names=["double"], optional=False, document="""1: 
closed surface, 0:iso surface"""), - 4 : PinSpecification(name = "slice_surfaces", type_names=["bool"], optional=False, document="""true: slicing will also take into account shell and 2D elements, false: sliicing will ignore shell and 2D elements. default is true""")}, - map_output_pin_spec={ - 2 : PinSpecification(name = "mesh", type_names=["meshed_region"], optional=False, document="""""")}) + description = """Extracts a skin of the mesh in triangles (2D elements) in a new meshed + region""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="iso_value", + type_names=["double"], + optional=False, + document="""Iso value""", + ), + 3: PinSpecification( + name="closed_surface", + type_names=["double"], + optional=False, + document="""1: closed surface, 0:iso surface""", + ), + 4: PinSpecification( + name="slice_surfaces", + type_names=["bool"], + optional=False, + document="""True: slicing will also take into account + shell and 2d elements, false: slicing + will ignore shell and 2d elements. + default is true""", + ), + }, + map_output_pin_spec={ + 2: PinSpecification( + name="mesh", + type_names=["meshed_region"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mesh_cut") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="mesh_cut", server=server) @property def inputs(self): @@ -80,169 +146,162 @@ def inputs(self): Returns -------- - inputs : InputsMeshCut + inputs : InputsMeshCut """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMeshCut + outputs : OutputsMeshCut """ return super().outputs -#internal name: mesh_cut -#scripting name: mesh_cut class InputsMeshCut(_Inputs): - """Intermediate class used to connect user inputs to mesh_cut operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mesh.mesh_cut() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_iso_value = float() - >>> op.inputs.iso_value.connect(my_iso_value) - >>> my_closed_surface = float() - >>> op.inputs.closed_surface.connect(my_closed_surface) - >>> my_slice_surfaces = bool() - >>> op.inputs.slice_surfaces.connect(my_slice_surfaces) + """Intermediate class used to connect user inputs to + mesh_cut operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.mesh_cut() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_iso_value = float() + >>> op.inputs.iso_value.connect(my_iso_value) + >>> my_closed_surface = float() + >>> op.inputs.closed_surface.connect(my_closed_surface) + >>> my_slice_surfaces = bool() + >>> op.inputs.slice_surfaces.connect(my_slice_surfaces) """ + def __init__(self, op: Operator): super().__init__(mesh_cut._spec().inputs, op) - self._field = Input(mesh_cut._spec().input_pin(0), 0, op, -1) + self._field = Input(mesh_cut._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._iso_value = Input(mesh_cut._spec().input_pin(1), 1, op, -1) + self._iso_value = Input(mesh_cut._spec().input_pin(1), 1, op, -1) self._inputs.append(self._iso_value) - self._closed_surface = Input(mesh_cut._spec().input_pin(3), 3, op, -1) + self._closed_surface = Input(mesh_cut._spec().input_pin(3), 3, op, -1) self._inputs.append(self._closed_surface) - self._slice_surfaces = Input(mesh_cut._spec().input_pin(4), 4, op, -1) + self._slice_surfaces = Input(mesh_cut._spec().input_pin(4), 4, op, -1) self._inputs.append(self._slice_surfaces) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. Parameters ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.mesh_cut() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def iso_value(self): - """Allows to connect iso_value input to the operator + """Allows to connect iso_value input to the operator. 
- - pindoc: iso value + Iso value Parameters ---------- - my_iso_value : float, + my_iso_value : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.mesh_cut() >>> op.inputs.iso_value.connect(my_iso_value) - >>> #or + >>> # or >>> op.inputs.iso_value(my_iso_value) - """ return self._iso_value @property def closed_surface(self): - """Allows to connect closed_surface input to the operator + """Allows to connect closed_surface input to the operator. - - pindoc: 1: closed surface, 0:iso surface + 1: closed surface, 0:iso surface Parameters ---------- - my_closed_surface : float, + my_closed_surface : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.mesh_cut() >>> op.inputs.closed_surface.connect(my_closed_surface) - >>> #or + >>> # or >>> op.inputs.closed_surface(my_closed_surface) - """ return self._closed_surface @property def slice_surfaces(self): - """Allows to connect slice_surfaces input to the operator + """Allows to connect slice_surfaces input to the operator. - - pindoc: true: slicing will also take into account shell and 2D elements, false: sliicing will ignore shell and 2D elements. default is true + True: slicing will also take into account + shell and 2d elements, false: slicing + will ignore shell and 2d elements. + default is true Parameters ---------- - my_slice_surfaces : bool, + my_slice_surfaces : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.mesh_cut() >>> op.inputs.slice_surfaces.connect(my_slice_surfaces) - >>> #or + >>> # or >>> op.inputs.slice_surfaces(my_slice_surfaces) - """ return self._slice_surfaces + class OutputsMeshCut(_Outputs): - """Intermediate class used to get outputs from mesh_cut operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mesh.mesh_cut() - >>> # Connect inputs : op.inputs. ... 
- >>> result_mesh = op.outputs.mesh() + """Intermediate class used to get outputs from + mesh_cut operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.mesh_cut() + >>> # Connect inputs : op.inputs. ... + >>> result_mesh = op.outputs.mesh() """ + def __init__(self, op: Operator): super().__init__(mesh_cut._spec().outputs, op) - self._mesh = Output(mesh_cut._spec().output_pin(2), 2, op) + self._mesh = Output(mesh_cut._spec().output_pin(2), 2, op) self._outputs.append(self._mesh) @property def mesh(self): """Allows to get mesh output of the operator - Returns ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.mesh_cut() >>> # Connect inputs : op.inputs. ... - >>> result_mesh = op.outputs.mesh() - """ + >>> result_mesh = op.outputs.mesh() + """ # noqa: E501 return self._mesh - diff --git a/ansys/dpf/core/operators/mesh/mesh_provider.py b/ansys/dpf/core/operators/mesh/mesh_provider.py index ce584a9f58d..58ec4adf708 100644 --- a/ansys/dpf/core/operators/mesh/mesh_provider.py +++ b/ansys/dpf/core/operators/mesh/mesh_provider.py @@ -1,70 +1,129 @@ """ mesh_provider -============= +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "mesh" category -""" class mesh_provider(Operator): """Read a mesh from result files and cure degenerated elements - available inputs: - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - read_cyclic (int) (optional) - - available outputs: - - mesh (MeshedRegion) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.mesh.mesh_provider() - - >>> # Make input connections - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.mesh.mesh_provider(streams_container=my_streams_container,data_sources=my_data_sources) + Parameters + ---------- + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + read_cyclic : int, optional + If 1 cyclic symmetry is ignored, if 2 cyclic + expansion is done (default is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.mesh.mesh_provider() + + >>> # Make input connections + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = 
dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.mesh.mesh_provider( + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_mesh = op.outputs.mesh() + """ - >>> # Get output data - >>> result_mesh = op.outputs.mesh()""" - def __init__(self, streams_container=None, data_sources=None, config=None, server=None): - super().__init__(name="MeshProvider", config = config, server = server) + def __init__( + self, + streams_container=None, + data_sources=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="MeshProvider", config=config, server=server) self._inputs = InputsMeshProvider(self) self._outputs = OutputsMeshProvider(self) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read a mesh from result files and cure degenerated elements""", - map_input_pin_spec={ - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 1 cyclic symmetry is ignored, if 2 cyclic expansion is done (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "mesh", 
type_names=["abstract_meshed_region"], optional=False, document="""""")}) + description = """Read a mesh from result files and cure degenerated elements""" + spec = Specification( + description=description, + map_input_pin_spec={ + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 1 cyclic symmetry is ignored, if 2 cyclic + expansion is done (default is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "MeshProvider") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="MeshProvider", server=server) @property def inputs(self): @@ -72,145 +131,140 @@ def inputs(self): Returns -------- - inputs : InputsMeshProvider + inputs : InputsMeshProvider """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMeshProvider + outputs : OutputsMeshProvider """ return super().outputs -#internal name: MeshProvider -#scripting name: mesh_provider class InputsMeshProvider(_Inputs): - """Intermediate class used to connect user inputs to mesh_provider operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mesh.mesh_provider() - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + mesh_provider operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.mesh_provider() + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(mesh_provider._spec().inputs, op) - self._streams_container = Input(mesh_provider._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(mesh_provider._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(mesh_provider._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(mesh_provider._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._read_cyclic = Input(mesh_provider._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(mesh_provider._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.mesh_provider() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.mesh_provider() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 1 cyclic symmetry is ignored, if 2 cyclic expansion is done (default is 1) + If 1 cyclic symmetry is ignored, if 2 cyclic + expansion is done (default is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.mesh_provider() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsMeshProvider(_Outputs): - """Intermediate class used to get outputs from mesh_provider operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mesh.mesh_provider() - >>> # Connect inputs : op.inputs. ... - >>> result_mesh = op.outputs.mesh() + """Intermediate class used to get outputs from + mesh_provider operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.mesh_provider() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_mesh = op.outputs.mesh() """ + def __init__(self, op: Operator): super().__init__(mesh_provider._spec().outputs, op) - self._mesh = Output(mesh_provider._spec().output_pin(0), 0, op) + self._mesh = Output(mesh_provider._spec().output_pin(0), 0, op) self._outputs.append(self._mesh) @property def mesh(self): """Allows to get mesh output of the operator - Returns ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.mesh_provider() >>> # Connect inputs : op.inputs. ... - >>> result_mesh = op.outputs.mesh() - """ + >>> result_mesh = op.outputs.mesh() + """ # noqa: E501 return self._mesh - diff --git a/ansys/dpf/core/operators/mesh/mesh_to_graphics.py b/ansys/dpf/core/operators/mesh/mesh_to_graphics.py new file mode 100644 index 00000000000..30bea815353 --- /dev/null +++ b/ansys/dpf/core/operators/mesh/mesh_to_graphics.py @@ -0,0 +1,310 @@ +""" +mesh_to_graphics +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class mesh_to_graphics(Operator): + """Generate tessellation for input mesh + + Parameters + ---------- + mesh_scoping : Scoping, optional + node_normals : bool, optional + Average element normals for node normals + (default no, use element normals for + node normals) + mesh : MeshedRegion + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.mesh.mesh_to_graphics() + + >>> # Make input connections + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_node_normals = bool() + >>> op.inputs.node_normals.connect(my_node_normals) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.mesh.mesh_to_graphics( + ... mesh_scoping=my_mesh_scoping, + ... node_normals=my_node_normals, + ... mesh=my_mesh, + ... 
) + + >>> # Get output data + >>> result_nodes = op.outputs.nodes() + >>> result_normals = op.outputs.normals() + >>> result_connectivity = op.outputs.connectivity() + """ + + def __init__( + self, mesh_scoping=None, node_normals=None, mesh=None, config=None, server=None + ): + super().__init__(name="mesh_to_graphics", config=config, server=server) + self._inputs = InputsMeshToGraphics(self) + self._outputs = OutputsMeshToGraphics(self) + if mesh_scoping is not None: + self.inputs.mesh_scoping.connect(mesh_scoping) + if node_normals is not None: + self.inputs.node_normals.connect(node_normals) + if mesh is not None: + self.inputs.mesh.connect(mesh) + + @staticmethod + def _spec(): + description = """Generate tessellation for input mesh""" + spec = Specification( + description=description, + map_input_pin_spec={ + 1: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="node_normals", + type_names=["bool"], + optional=True, + document="""Average element normals for node normals + (default no, use element normals for + node normals)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="nodes", + type_names=["field"], + optional=False, + document="""Node coordinates""", + ), + 1: PinSpecification( + name="normals", + type_names=["field"], + optional=False, + document="""Node normals""", + ), + 2: PinSpecification( + name="connectivity", + type_names=["property_field"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="mesh_to_graphics", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsMeshToGraphics + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsMeshToGraphics + """ + return super().outputs + + +class InputsMeshToGraphics(_Inputs): + """Intermediate class used to connect user inputs to + mesh_to_graphics operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.mesh_to_graphics() + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_node_normals = bool() + >>> op.inputs.node_normals.connect(my_node_normals) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + """ + + def __init__(self, op: Operator): + super().__init__(mesh_to_graphics._spec().inputs, op) + self._mesh_scoping = Input(mesh_to_graphics._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._mesh_scoping) + self._node_normals = Input(mesh_to_graphics._spec().input_pin(2), 2, op, -1) + self._inputs.append(self._node_normals) + self._mesh = Input(mesh_to_graphics._spec().input_pin(7), 7, op, -1) + self._inputs.append(self._mesh) + + @property + def mesh_scoping(self): + """Allows to connect mesh_scoping input to the operator. 
+ + Parameters + ---------- + my_mesh_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.mesh_to_graphics() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> # or + >>> op.inputs.mesh_scoping(my_mesh_scoping) + """ + return self._mesh_scoping + + @property + def node_normals(self): + """Allows to connect node_normals input to the operator. + + Average element normals for node normals + (default no, use element normals for + node normals) + + Parameters + ---------- + my_node_normals : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.mesh_to_graphics() + >>> op.inputs.node_normals.connect(my_node_normals) + >>> # or + >>> op.inputs.node_normals(my_node_normals) + """ + return self._node_normals + + @property + def mesh(self): + """Allows to connect mesh input to the operator. + + Parameters + ---------- + my_mesh : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.mesh_to_graphics() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + +class OutputsMeshToGraphics(_Outputs): + """Intermediate class used to get outputs from + mesh_to_graphics operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.mesh_to_graphics() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_nodes = op.outputs.nodes() + >>> result_normals = op.outputs.normals() + >>> result_connectivity = op.outputs.connectivity() + """ + + def __init__(self, op: Operator): + super().__init__(mesh_to_graphics._spec().outputs, op) + self._nodes = Output(mesh_to_graphics._spec().output_pin(0), 0, op) + self._outputs.append(self._nodes) + self._normals = Output(mesh_to_graphics._spec().output_pin(1), 1, op) + self._outputs.append(self._normals) + self._connectivity = Output(mesh_to_graphics._spec().output_pin(2), 2, op) + self._outputs.append(self._connectivity) + + @property + def nodes(self): + """Allows to get nodes output of the operator + + Returns + ---------- + my_nodes : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.mesh_to_graphics() + >>> # Connect inputs : op.inputs. ... + >>> result_nodes = op.outputs.nodes() + """ # noqa: E501 + return self._nodes + + @property + def normals(self): + """Allows to get normals output of the operator + + Returns + ---------- + my_normals : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.mesh_to_graphics() + >>> # Connect inputs : op.inputs. ... + >>> result_normals = op.outputs.normals() + """ # noqa: E501 + return self._normals + + @property + def connectivity(self): + """Allows to get connectivity output of the operator + + Returns + ---------- + my_connectivity : PropertyField + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.mesh_to_graphics() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_connectivity = op.outputs.connectivity() + """ # noqa: E501 + return self._connectivity diff --git a/ansys/dpf/core/operators/mesh/mesh_to_graphics_edges.py b/ansys/dpf/core/operators/mesh/mesh_to_graphics_edges.py new file mode 100644 index 00000000000..9f0e6bdf9d3 --- /dev/null +++ b/ansys/dpf/core/operators/mesh/mesh_to_graphics_edges.py @@ -0,0 +1,240 @@ +""" +mesh_to_graphics_edges +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class mesh_to_graphics_edges(Operator): + """Generate edges of surface elements for input mesh + + Parameters + ---------- + mesh_scoping : Scoping, optional + mesh : MeshedRegion + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.mesh.mesh_to_graphics_edges() + + >>> # Make input connections + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.mesh.mesh_to_graphics_edges( + ... mesh_scoping=my_mesh_scoping, + ... mesh=my_mesh, + ... 
) + + >>> # Get output data + >>> result_nodes = op.outputs.nodes() + >>> result_connectivity = op.outputs.connectivity() + """ + + def __init__(self, mesh_scoping=None, mesh=None, config=None, server=None): + super().__init__(name="mesh_to_graphics_edges", config=config, server=server) + self._inputs = InputsMeshToGraphicsEdges(self) + self._outputs = OutputsMeshToGraphicsEdges(self) + if mesh_scoping is not None: + self.inputs.mesh_scoping.connect(mesh_scoping) + if mesh is not None: + self.inputs.mesh.connect(mesh) + + @staticmethod + def _spec(): + description = """Generate edges of surface elements for input mesh""" + spec = Specification( + description=description, + map_input_pin_spec={ + 1: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=True, + document="""""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="nodes", + type_names=["field"], + optional=False, + document="""Node coordinates""", + ), + 2: PinSpecification( + name="connectivity", + type_names=["property_field"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="mesh_to_graphics_edges", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsMeshToGraphicsEdges + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsMeshToGraphicsEdges + """ + return super().outputs + + +class InputsMeshToGraphicsEdges(_Inputs): + """Intermediate class used to connect user inputs to + mesh_to_graphics_edges operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.mesh_to_graphics_edges() + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + """ + + def __init__(self, op: Operator): + super().__init__(mesh_to_graphics_edges._spec().inputs, op) + self._mesh_scoping = Input( + mesh_to_graphics_edges._spec().input_pin(1), 1, op, -1 + ) + self._inputs.append(self._mesh_scoping) + self._mesh = Input(mesh_to_graphics_edges._spec().input_pin(7), 7, op, -1) + self._inputs.append(self._mesh) + + @property + def mesh_scoping(self): + """Allows to connect mesh_scoping input to the operator. + + Parameters + ---------- + my_mesh_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.mesh_to_graphics_edges() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> # or + >>> op.inputs.mesh_scoping(my_mesh_scoping) + """ + return self._mesh_scoping + + @property + def mesh(self): + """Allows to connect mesh input to the operator. 
+ + Parameters + ---------- + my_mesh : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.mesh_to_graphics_edges() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + +class OutputsMeshToGraphicsEdges(_Outputs): + """Intermediate class used to get outputs from + mesh_to_graphics_edges operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.mesh_to_graphics_edges() + >>> # Connect inputs : op.inputs. ... + >>> result_nodes = op.outputs.nodes() + >>> result_connectivity = op.outputs.connectivity() + """ + + def __init__(self, op: Operator): + super().__init__(mesh_to_graphics_edges._spec().outputs, op) + self._nodes = Output(mesh_to_graphics_edges._spec().output_pin(0), 0, op) + self._outputs.append(self._nodes) + self._connectivity = Output(mesh_to_graphics_edges._spec().output_pin(2), 2, op) + self._outputs.append(self._connectivity) + + @property + def nodes(self): + """Allows to get nodes output of the operator + + Returns + ---------- + my_nodes : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.mesh_to_graphics_edges() + >>> # Connect inputs : op.inputs. ... + >>> result_nodes = op.outputs.nodes() + """ # noqa: E501 + return self._nodes + + @property + def connectivity(self): + """Allows to get connectivity output of the operator + + Returns + ---------- + my_connectivity : PropertyField + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.mesh_to_graphics_edges() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_connectivity = op.outputs.connectivity() + """ # noqa: E501 + return self._connectivity diff --git a/ansys/dpf/core/operators/mesh/node_coordinates.py b/ansys/dpf/core/operators/mesh/node_coordinates.py index 23e587369bc..c9af7d13f5e 100644 --- a/ansys/dpf/core/operators/mesh/node_coordinates.py +++ b/ansys/dpf/core/operators/mesh/node_coordinates.py @@ -1,60 +1,94 @@ """ node_coordinates -================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.outputs import _modify_output_spec_with_one_type from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "mesh" category -""" class node_coordinates(Operator): """Returns the node coordinates of the mesh(es) in input - available inputs: - - mesh (MeshedRegion, MeshesContainer) + Parameters + ---------- + mesh : MeshedRegion or MeshesContainer + - available outputs: - - coordinates (Field ,FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.mesh.node_coordinates() - >>> # Instantiate operator - >>> op = dpf.operators.mesh.node_coordinates() + >>> # Make input connections + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) - >>> # Make input connections - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.mesh.node_coordinates( + ... mesh=my_mesh, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.mesh.node_coordinates(mesh=my_mesh) + >>> # Get output data + >>> result_coordinates = op.outputs.coordinates() + """ - >>> # Get output data - >>> result_coordinates = op.outputs.coordinates()""" def __init__(self, mesh=None, config=None, server=None): - super().__init__(name="mesh::node_coordinates", config = config, server = server) + super().__init__(name="mesh::node_coordinates", config=config, server=server) self._inputs = InputsNodeCoordinates(self) self._outputs = OutputsNodeCoordinates(self) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Returns the node coordinates of the mesh(es) in input""", - map_input_pin_spec={ - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "coordinates", type_names=["field","fields_container"], optional=False, document="""if the input is a meshed region, a field of coordinates is the output, else if the input is a meshes container, a fields container (one field by mesh) is the output""")}) + description = """Returns the node coordinates of the mesh(es) in input""" + spec = Specification( + description=description, + map_input_pin_spec={ + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="coordinates", + type_names=["field", "fields_container"], + optional=False, + document="""If the input is a meshed region, a field of + coordinates is the output, else if + the input is a meshes container, a + fields container (one field by mesh) + is the output""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = 
"mesh::node_coordinates") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="mesh::node_coordinates", server=server) @property def inputs(self): @@ -62,74 +96,84 @@ def inputs(self): Returns -------- - inputs : InputsNodeCoordinates + inputs : InputsNodeCoordinates """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNodeCoordinates + outputs : OutputsNodeCoordinates """ return super().outputs -#internal name: mesh::node_coordinates -#scripting name: node_coordinates class InputsNodeCoordinates(_Inputs): - """Intermediate class used to connect user inputs to node_coordinates operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mesh.node_coordinates() - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + node_coordinates operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.node_coordinates() + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(node_coordinates._spec().inputs, op) - self._mesh = Input(node_coordinates._spec().input_pin(7), 7, op, -1) + self._mesh = Input(node_coordinates._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.node_coordinates() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsNodeCoordinates(_Outputs): - """Intermediate class used to get outputs from node_coordinates operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mesh.node_coordinates() - >>> # Connect inputs : op.inputs. ... - >>> result_coordinates = op.outputs.coordinates() + """Intermediate class used to get outputs from + node_coordinates operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.node_coordinates() + >>> # Connect inputs : op.inputs. ... + >>> result_coordinates = op.outputs.coordinates() """ + def __init__(self, op: Operator): super().__init__(node_coordinates._spec().outputs, op) - self.coordinates_as_field = Output( _modify_output_spec_with_one_type(node_coordinates._spec().output_pin(0), "field"), 0, op) + self.coordinates_as_field = Output( + _modify_output_spec_with_one_type( + node_coordinates._spec().output_pin(0), "field" + ), + 0, + op, + ) self._outputs.append(self.coordinates_as_field) - self.coordinates_as_fields_container = Output( _modify_output_spec_with_one_type(node_coordinates._spec().output_pin(0), "fields_container"), 0, op) + self.coordinates_as_fields_container = Output( + _modify_output_spec_with_one_type( + node_coordinates._spec().output_pin(0), "fields_container" + ), + 0, + op, + ) self._outputs.append(self.coordinates_as_fields_container) - diff --git a/ansys/dpf/core/operators/mesh/points_from_coordinates.py b/ansys/dpf/core/operators/mesh/points_from_coordinates.py index 908d7c70d2e..f33dd7a5796 100644 --- a/ansys/dpf/core/operators/mesh/points_from_coordinates.py +++ 
b/ansys/dpf/core/operators/mesh/points_from_coordinates.py @@ -1,66 +1,107 @@ """ points_from_coordinates -======================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "mesh" category -""" class points_from_coordinates(Operator): - """Extract a mesh made of points elements. This mesh is made from input meshes coordinates on the input scopings. + """Extract a mesh made of points elements. This mesh is made from input + meshes coordinates on the input scopings. + + Parameters + ---------- + nodes_to_keep : Scoping or ScopingsContainer + mesh : MeshedRegion or MeshesContainer - available inputs: - - nodes_to_keep (Scoping, ScopingsContainer) - - mesh (MeshedRegion, MeshesContainer) - available outputs: - - meshed_region (MeshedRegion) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.mesh.points_from_coordinates() - >>> # Instantiate operator - >>> op = dpf.operators.mesh.points_from_coordinates() + >>> # Make input connections + >>> my_nodes_to_keep = dpf.Scoping() + >>> op.inputs.nodes_to_keep.connect(my_nodes_to_keep) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) - >>> # Make input connections - >>> my_nodes_to_keep = dpf.Scoping() - >>> op.inputs.nodes_to_keep.connect(my_nodes_to_keep) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.mesh.points_from_coordinates( + ... 
nodes_to_keep=my_nodes_to_keep, + ... mesh=my_mesh, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.mesh.points_from_coordinates(nodes_to_keep=my_nodes_to_keep,mesh=my_mesh) + >>> # Get output data + >>> result_abstract_meshed_region = op.outputs.abstract_meshed_region() + """ - >>> # Get output data - >>> result_meshed_region = op.outputs.meshed_region()""" def __init__(self, nodes_to_keep=None, mesh=None, config=None, server=None): - super().__init__(name="mesh::points_from_coordinates", config = config, server = server) + super().__init__( + name="mesh::points_from_coordinates", config=config, server=server + ) self._inputs = InputsPointsFromCoordinates(self) self._outputs = OutputsPointsFromCoordinates(self) - if nodes_to_keep !=None: + if nodes_to_keep is not None: self.inputs.nodes_to_keep.connect(nodes_to_keep) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Extract a mesh made of points elements. This mesh is made from input meshes coordinates on the input scopings.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "nodes_to_keep", type_names=["scoping","scopings_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "meshed_region", type_names=["abstract_meshed_region"], optional=False, document="""""")}) + description = """Extract a mesh made of points elements. 
This mesh is made from input + meshes coordinates on the input scopings.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="nodes_to_keep", + type_names=["scoping", "scopings_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="abstract_meshed_region", + type_names=["abstract_meshed_region"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mesh::points_from_coordinates") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config( + name="mesh::points_from_coordinates", server=server + ) @property def inputs(self): @@ -68,115 +109,113 @@ def inputs(self): Returns -------- - inputs : InputsPointsFromCoordinates + inputs : InputsPointsFromCoordinates """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPointsFromCoordinates + outputs : OutputsPointsFromCoordinates """ return super().outputs -#internal name: mesh::points_from_coordinates -#scripting name: points_from_coordinates class InputsPointsFromCoordinates(_Inputs): - """Intermediate class used to connect user inputs to points_from_coordinates operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mesh.points_from_coordinates() - >>> my_nodes_to_keep = dpf.Scoping() - >>> op.inputs.nodes_to_keep.connect(my_nodes_to_keep) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + points_from_coordinates operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.points_from_coordinates() + >>> my_nodes_to_keep = dpf.Scoping() + >>> op.inputs.nodes_to_keep.connect(my_nodes_to_keep) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(points_from_coordinates._spec().inputs, op) - self._nodes_to_keep = Input(points_from_coordinates._spec().input_pin(0), 0, op, -1) + self._nodes_to_keep = Input( + points_from_coordinates._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._nodes_to_keep) - self._mesh = Input(points_from_coordinates._spec().input_pin(1), 1, op, -1) + self._mesh = Input(points_from_coordinates._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh) @property def nodes_to_keep(self): - """Allows to connect nodes_to_keep input to the operator + """Allows to connect nodes_to_keep input to the operator. Parameters ---------- - my_nodes_to_keep : Scoping, ScopingsContainer, + my_nodes_to_keep : Scoping or ScopingsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.points_from_coordinates() >>> op.inputs.nodes_to_keep.connect(my_nodes_to_keep) - >>> #or + >>> # or >>> op.inputs.nodes_to_keep(my_nodes_to_keep) - """ return self._nodes_to_keep @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.points_from_coordinates() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsPointsFromCoordinates(_Outputs): - """Intermediate class used to get outputs from points_from_coordinates operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mesh.points_from_coordinates() - >>> # Connect inputs : op.inputs. ... - >>> result_meshed_region = op.outputs.meshed_region() + """Intermediate class used to get outputs from + points_from_coordinates operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.points_from_coordinates() + >>> # Connect inputs : op.inputs. ... + >>> result_abstract_meshed_region = op.outputs.abstract_meshed_region() """ + def __init__(self, op: Operator): super().__init__(points_from_coordinates._spec().outputs, op) - self._meshed_region = Output(points_from_coordinates._spec().output_pin(0), 0, op) - self._outputs.append(self._meshed_region) + self._abstract_meshed_region = Output( + points_from_coordinates._spec().output_pin(0), 0, op + ) + self._outputs.append(self._abstract_meshed_region) @property - def meshed_region(self): - """Allows to get meshed_region output of the operator - + def abstract_meshed_region(self): + """Allows to get abstract_meshed_region output of the operator Returns ---------- - my_meshed_region : MeshedRegion, + my_abstract_meshed_region : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.points_from_coordinates() >>> # Connect inputs : op.inputs. ... 
- >>> result_meshed_region = op.outputs.meshed_region() - """ - return self._meshed_region - + >>> result_abstract_meshed_region = op.outputs.abstract_meshed_region() + """ # noqa: E501 + return self._abstract_meshed_region diff --git a/ansys/dpf/core/operators/mesh/skin.py b/ansys/dpf/core/operators/mesh/skin.py index a5cb9200b2c..892376cbe6b 100644 --- a/ansys/dpf/core/operators/mesh/skin.py +++ b/ansys/dpf/core/operators/mesh/skin.py @@ -1,75 +1,126 @@ """ skin -==== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from meshOperatorsCore plugin, from "mesh" category -""" class skin(Operator): - """Extracts a skin of the mesh (2D elements) in a new meshed region. Material id of initial elements are propagated to their facets. 
- - available inputs: - - mesh (MeshedRegion) - - mesh_scoping (Scoping) (optional) - - available outputs: - - mesh (MeshedRegion) - - nodes_mesh_scoping (Scoping) - - map_new_elements_to_old () - - property_field_new_elements_to_old (PropertyField) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.mesh.skin() - - >>> # Make input connections - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.mesh.skin(mesh=my_mesh,mesh_scoping=my_mesh_scoping) - - >>> # Get output data - >>> result_mesh = op.outputs.mesh() - >>> result_nodes_mesh_scoping = op.outputs.nodes_mesh_scoping() - >>> result_map_new_elements_to_old = op.outputs.map_new_elements_to_old() - >>> result_property_field_new_elements_to_old = op.outputs.property_field_new_elements_to_old()""" + """Extracts a skin of the mesh (2D elements) in a new meshed region. + Material id of initial elements are propagated to their facets. + + Parameters + ---------- + mesh : MeshedRegion + mesh_scoping : Scoping, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.mesh.skin() + + >>> # Make input connections + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.mesh.skin( + ... mesh=my_mesh, + ... mesh_scoping=my_mesh_scoping, + ... 
) + + >>> # Get output data + >>> result_mesh = op.outputs.mesh() + >>> result_nodes_mesh_scoping = op.outputs.nodes_mesh_scoping() + >>> result_map_new_elements_to_old = op.outputs.map_new_elements_to_old() + >>> result_property_field_new_elements_to_old = op.outputs.property_field_new_elements_to_old() + """ + def __init__(self, mesh=None, mesh_scoping=None, config=None, server=None): - super().__init__(name="meshed_skin_sector", config = config, server = server) + super().__init__(name="meshed_skin_sector", config=config, server=server) self._inputs = InputsSkin(self) self._outputs = OutputsSkin(self) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) @staticmethod def _spec(): - spec = Specification(description="""Extracts a skin of the mesh (2D elements) in a new meshed region. Material id of initial elements are propagated to their facets.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=False, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scoping"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=False, document="""skin meshed region with facets and facets_to_ele property fields"""), - 1 : PinSpecification(name = "nodes_mesh_scoping", type_names=["scoping"], optional=False, document=""""""), - 2 : PinSpecification(name = "map_new_elements_to_old", type_names=[], optional=False, document=""""""), - 3 : PinSpecification(name = "property_field_new_elements_to_old", type_names=["property_field"], optional=False, document="""""")}) + description = """Extracts a skin of the mesh (2D elements) in a new meshed region. 
+ Material id of initial elements are propagated to their + facets.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document="""Skin meshed region with facets and + facets_to_ele property fields""", + ), + 1: PinSpecification( + name="nodes_mesh_scoping", + type_names=["scoping"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="map_new_elements_to_old", + type_names=["umap"], + optional=False, + document="""""", + ), + 3: PinSpecification( + name="property_field_new_elements_to_old", + type_names=["property_field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "meshed_skin_sector") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="meshed_skin_sector", server=server) @property def inputs(self): @@ -77,140 +128,171 @@ def inputs(self): Returns -------- - inputs : InputsSkin + inputs : InputsSkin """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsSkin + outputs : OutputsSkin """ return super().outputs -#internal name: meshed_skin_sector -#scripting name: skin class InputsSkin(_Inputs): - """Intermediate class used to connect user inputs to skin operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mesh.skin() - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + """Intermediate class used to connect user inputs to + skin operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.skin() + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) """ + def __init__(self, op: Operator): super().__init__(skin._spec().inputs, op) - self._mesh = Input(skin._spec().input_pin(0), 0, op, -1) + self._mesh = Input(skin._spec().input_pin(0), 0, op, -1) self._inputs.append(self._mesh) - self._mesh_scoping = Input(skin._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(skin._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.skin() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : Scoping, + my_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.skin() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping + class OutputsSkin(_Outputs): - """Intermediate class used to get outputs from skin operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mesh.skin() - >>> # Connect inputs : op.inputs. ... - >>> result_mesh = op.outputs.mesh() - >>> result_nodes_mesh_scoping = op.outputs.nodes_mesh_scoping() + """Intermediate class used to get outputs from + skin operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.skin() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_mesh = op.outputs.mesh() + >>> result_nodes_mesh_scoping = op.outputs.nodes_mesh_scoping() + >>> result_map_new_elements_to_old = op.outputs.map_new_elements_to_old() + >>> result_property_field_new_elements_to_old = op.outputs.property_field_new_elements_to_old() """ + def __init__(self, op: Operator): super().__init__(skin._spec().outputs, op) - self._mesh = Output(skin._spec().output_pin(0), 0, op) + self._mesh = Output(skin._spec().output_pin(0), 0, op) self._outputs.append(self._mesh) - self._nodes_mesh_scoping = Output(skin._spec().output_pin(1), 1, op) + self._nodes_mesh_scoping = Output(skin._spec().output_pin(1), 1, op) self._outputs.append(self._nodes_mesh_scoping) - pass + self._map_new_elements_to_old = Output(skin._spec().output_pin(2), 2, op) + self._outputs.append(self._map_new_elements_to_old) + self._property_field_new_elements_to_old = Output( + skin._spec().output_pin(3), 3, op + ) + self._outputs.append(self._property_field_new_elements_to_old) @property def mesh(self): """Allows to get mesh output of the operator - - - pindoc: skin meshed region with facets and facets_to_ele property fields - Returns ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.skin() >>> # Connect inputs : op.inputs. ... - >>> result_mesh = op.outputs.mesh() - """ + >>> result_mesh = op.outputs.mesh() + """ # noqa: E501 return self._mesh @property def nodes_mesh_scoping(self): """Allows to get nodes_mesh_scoping output of the operator - Returns ---------- - my_nodes_mesh_scoping : Scoping, + my_nodes_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.skin() >>> # Connect inputs : op.inputs. ... 
- >>> result_nodes_mesh_scoping = op.outputs.nodes_mesh_scoping() - """ + >>> result_nodes_mesh_scoping = op.outputs.nodes_mesh_scoping() + """ # noqa: E501 return self._nodes_mesh_scoping + @property + def map_new_elements_to_old(self): + """Allows to get map_new_elements_to_old output of the operator + + Returns + ---------- + my_map_new_elements_to_old : + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.skin() + >>> # Connect inputs : op.inputs. ... + >>> result_map_new_elements_to_old = op.outputs.map_new_elements_to_old() + """ # noqa: E501 + return self._map_new_elements_to_old + + @property + def property_field_new_elements_to_old(self): + """Allows to get property_field_new_elements_to_old output of the operator + + Returns + ---------- + my_property_field_new_elements_to_old : PropertyField + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.skin() + >>> # Connect inputs : op.inputs. ... + >>> result_property_field_new_elements_to_old = op.outputs.property_field_new_elements_to_old() + """ # noqa: E501 + return self._property_field_new_elements_to_old diff --git a/ansys/dpf/core/operators/mesh/split_fields.py b/ansys/dpf/core/operators/mesh/split_fields.py index f755d32358c..b0255b8a691 100644 --- a/ansys/dpf/core/operators/mesh/split_fields.py +++ b/ansys/dpf/core/operators/mesh/split_fields.py @@ -1,66 +1,112 @@ """ split_fields -============ +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "mesh" category -""" class split_fields(Operator): - """Split the input field or fields container based on the input mesh regions - - available inputs: - - field_or_fields_container (Field, FieldsContainer) - - mesh_controller (MeshesContainer) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.mesh.split_fields() - - >>> # Make input connections - >>> my_field_or_fields_container = dpf.Field() - >>> op.inputs.field_or_fields_container.connect(my_field_or_fields_container) - >>> my_mesh_controller = dpf.MeshesContainer() - >>> op.inputs.mesh_controller.connect(my_mesh_controller) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.mesh.split_fields(field_or_fields_container=my_field_or_fields_container,mesh_controller=my_mesh_controller) + """Split the input field or fields container based on the input mesh + regions + + Parameters + ---------- + field_or_fields_container : Field or FieldsContainer + mesh_controller : MeshesContainer + Body meshes in the mesh controller cannot be + mixed shell/solid + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.mesh.split_fields() + + >>> # Make input connections + >>> my_field_or_fields_container = dpf.Field() + >>> op.inputs.field_or_fields_container.connect(my_field_or_fields_container) + >>> my_mesh_controller = dpf.MeshesContainer() + >>> op.inputs.mesh_controller.connect(my_mesh_controller) + + 
>>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.mesh.split_fields( + ... field_or_fields_container=my_field_or_fields_container, + ... mesh_controller=my_mesh_controller, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, field_or_fields_container=None, mesh_controller=None, config=None, server=None): - super().__init__(name="split_fields", config = config, server = server) + def __init__( + self, + field_or_fields_container=None, + mesh_controller=None, + config=None, + server=None, + ): + super().__init__(name="split_fields", config=config, server=server) self._inputs = InputsSplitFields(self) self._outputs = OutputsSplitFields(self) - if field_or_fields_container !=None: + if field_or_fields_container is not None: self.inputs.field_or_fields_container.connect(field_or_fields_container) - if mesh_controller !=None: + if mesh_controller is not None: self.inputs.mesh_controller.connect(mesh_controller) @staticmethod def _spec(): - spec = Specification(description="""Split the input field or fields container based on the input mesh regions """, - map_input_pin_spec={ - 0 : PinSpecification(name = "field_or_fields_container", type_names=["field","fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "mesh_controller", type_names=["meshes_container"], optional=False, document="""body meshes in the mesh controller cannot be mixed shell/solid""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Split the input field or fields container based on the input mesh + regions""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field_or_fields_container", + type_names=["field", 
"fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="mesh_controller", + type_names=["meshes_container"], + optional=False, + document="""Body meshes in the mesh controller cannot be + mixed shell/solid""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "split_fields") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="split_fields", server=server) @property def inputs(self): @@ -68,117 +114,114 @@ def inputs(self): Returns -------- - inputs : InputsSplitFields + inputs : InputsSplitFields """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsSplitFields + outputs : OutputsSplitFields """ return super().outputs -#internal name: split_fields -#scripting name: split_fields class InputsSplitFields(_Inputs): - """Intermediate class used to connect user inputs to split_fields operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mesh.split_fields() - >>> my_field_or_fields_container = dpf.Field() - >>> op.inputs.field_or_fields_container.connect(my_field_or_fields_container) - >>> my_mesh_controller = dpf.MeshesContainer() - >>> op.inputs.mesh_controller.connect(my_mesh_controller) + """Intermediate class used to connect user 
inputs to + split_fields operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.split_fields() + >>> my_field_or_fields_container = dpf.Field() + >>> op.inputs.field_or_fields_container.connect(my_field_or_fields_container) + >>> my_mesh_controller = dpf.MeshesContainer() + >>> op.inputs.mesh_controller.connect(my_mesh_controller) """ + def __init__(self, op: Operator): super().__init__(split_fields._spec().inputs, op) - self._field_or_fields_container = Input(split_fields._spec().input_pin(0), 0, op, -1) + self._field_or_fields_container = Input( + split_fields._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field_or_fields_container) - self._mesh_controller = Input(split_fields._spec().input_pin(1), 1, op, -1) + self._mesh_controller = Input(split_fields._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_controller) @property def field_or_fields_container(self): - """Allows to connect field_or_fields_container input to the operator + """Allows to connect field_or_fields_container input to the operator. Parameters ---------- - my_field_or_fields_container : Field, FieldsContainer, + my_field_or_fields_container : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.split_fields() >>> op.inputs.field_or_fields_container.connect(my_field_or_fields_container) - >>> #or + >>> # or >>> op.inputs.field_or_fields_container(my_field_or_fields_container) - """ return self._field_or_fields_container @property def mesh_controller(self): - """Allows to connect mesh_controller input to the operator + """Allows to connect mesh_controller input to the operator. 
- - pindoc: body meshes in the mesh controller cannot be mixed shell/solid + Body meshes in the mesh controller cannot be + mixed shell/solid Parameters ---------- - my_mesh_controller : MeshesContainer, + my_mesh_controller : MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.split_fields() >>> op.inputs.mesh_controller.connect(my_mesh_controller) - >>> #or + >>> # or >>> op.inputs.mesh_controller(my_mesh_controller) - """ return self._mesh_controller + class OutputsSplitFields(_Outputs): - """Intermediate class used to get outputs from split_fields operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mesh.split_fields() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + split_fields operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.split_fields() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(split_fields._spec().outputs, op) - self._fields_container = Output(split_fields._spec().output_pin(0), 0, op) + self._fields_container = Output(split_fields._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.split_fields() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/mesh/split_mesh.py b/ansys/dpf/core/operators/mesh/split_mesh.py index 4c72a75156c..e0bba8c3d69 100644 --- a/ansys/dpf/core/operators/mesh/split_mesh.py +++ b/ansys/dpf/core/operators/mesh/split_mesh.py @@ -1,72 +1,118 @@ """ split_mesh -========== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "mesh" category -""" class split_mesh(Operator): - """Split the input mesh into several meshes based on a given property (material property be default) - - available inputs: - - mesh_scoping (Scoping) (optional) - - mesh (MeshedRegion) - - property (str) - - available outputs: - - mesh_controller (MeshesContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.mesh.split_mesh() - - >>> # Make input connections - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_property = str() - >>> op.inputs.property.connect(my_property) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.mesh.split_mesh(mesh_scoping=my_mesh_scoping,mesh=my_mesh,property=my_property) - - >>> # Get output data - >>> result_mesh_controller = op.outputs.mesh_controller()""" - def __init__(self, mesh_scoping=None, mesh=None, property=None, config=None, server=None): - 
super().__init__(name="split_mesh", config = config, server = server) + """Split the input mesh into several meshes based on a given property + (material property be default) + + Parameters + ---------- + mesh_scoping : Scoping, optional + Scoping + mesh : MeshedRegion + property : str + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.mesh.split_mesh() + + >>> # Make input connections + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_property = str() + >>> op.inputs.property.connect(my_property) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.mesh.split_mesh( + ... mesh_scoping=my_mesh_scoping, + ... mesh=my_mesh, + ... property=my_property, + ... ) + + >>> # Get output data + >>> result_mesh_controller = op.outputs.mesh_controller() + """ + + def __init__( + self, mesh_scoping=None, mesh=None, property=None, config=None, server=None + ): + super().__init__(name="split_mesh", config=config, server=server) self._inputs = InputsSplitMesh(self) self._outputs = OutputsSplitMesh(self) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) - if property !=None: + if property is not None: self.inputs.property.connect(property) @staticmethod def _spec(): - spec = Specification(description="""Split the input mesh into several meshes based on a given property (material property be default)""", - map_input_pin_spec={ - 1 : PinSpecification(name = "mesh_scoping", type_names=["scoping"], optional=True, document="""Scoping"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=False, document=""""""), - 13 : PinSpecification(name = "property", type_names=["string"], optional=False, 
document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "mesh_controller", type_names=["meshes_container"], optional=False, document="""""")}) + description = """Split the input mesh into several meshes based on a given property + (material property be default)""" + spec = Specification( + description=description, + map_input_pin_spec={ + 1: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=True, + document="""Scoping""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document="""""", + ), + 13: PinSpecification( + name="property", + type_names=["string"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="mesh_controller", + type_names=["meshes_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "split_mesh") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="split_mesh", server=server) @property def inputs(self): @@ -74,141 +120,133 @@ def inputs(self): Returns -------- - inputs : InputsSplitMesh + inputs : InputsSplitMesh """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsSplitMesh + outputs : OutputsSplitMesh """ return super().outputs -#internal name: split_mesh -#scripting name: split_mesh class InputsSplitMesh(_Inputs): - """Intermediate class used to connect user inputs to split_mesh operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mesh.split_mesh() - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_property = str() - >>> op.inputs.property.connect(my_property) + """Intermediate class used to connect user inputs to + split_mesh operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.split_mesh() + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_property = str() + >>> op.inputs.property.connect(my_property) """ + def __init__(self, op: Operator): super().__init__(split_mesh._spec().inputs, op) - self._mesh_scoping = Input(split_mesh._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(split_mesh._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._mesh = Input(split_mesh._spec().input_pin(7), 7, op, -1) + self._mesh = Input(split_mesh._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._property = Input(split_mesh._spec().input_pin(13), 13, op, -1) + self._property = Input(split_mesh._spec().input_pin(13), 13, op, -1) self._inputs.append(self._property) @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: Scoping + Scoping Parameters ---------- - my_mesh_scoping : Scoping, + my_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.split_mesh() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.split_mesh() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def property(self): - """Allows to connect property input to the operator + """Allows to connect property input to the operator. Parameters ---------- - my_property : str, + my_property : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.split_mesh() >>> op.inputs.property.connect(my_property) - >>> #or + >>> # or >>> op.inputs.property(my_property) - """ return self._property + class OutputsSplitMesh(_Outputs): - """Intermediate class used to get outputs from split_mesh operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mesh.split_mesh() - >>> # Connect inputs : op.inputs. ... - >>> result_mesh_controller = op.outputs.mesh_controller() + """Intermediate class used to get outputs from + split_mesh operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.split_mesh() + >>> # Connect inputs : op.inputs. ... + >>> result_mesh_controller = op.outputs.mesh_controller() """ + def __init__(self, op: Operator): super().__init__(split_mesh._spec().outputs, op) - self._mesh_controller = Output(split_mesh._spec().output_pin(0), 0, op) + self._mesh_controller = Output(split_mesh._spec().output_pin(0), 0, op) self._outputs.append(self._mesh_controller) @property def mesh_controller(self): """Allows to get mesh_controller output of the operator - Returns ---------- - my_mesh_controller : MeshesContainer, + my_mesh_controller : MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.split_mesh() >>> # Connect inputs : op.inputs. ... 
- >>> result_mesh_controller = op.outputs.mesh_controller() - """ + >>> result_mesh_controller = op.outputs.mesh_controller() + """ # noqa: E501 return self._mesh_controller - diff --git a/ansys/dpf/core/operators/mesh/stl_export.py b/ansys/dpf/core/operators/mesh/stl_export.py index cdbabea705e..96616ae335f 100644 --- a/ansys/dpf/core/operators/mesh/stl_export.py +++ b/ansys/dpf/core/operators/mesh/stl_export.py @@ -1,66 +1,101 @@ """ stl_export -========== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from meshOperatorsCore plugin, from "mesh" category -""" class stl_export(Operator): """export a mesh into a stl file. - available inputs: - - mesh (MeshedRegion) - - file_path (str) + Parameters + ---------- + mesh : MeshedRegion + file_path : str + - available outputs: - - data_sources (DataSources) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.mesh.stl_export() - >>> # Instantiate operator - >>> op = dpf.operators.mesh.stl_export() + >>> # Make input connections + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_file_path = str() + >>> op.inputs.file_path.connect(my_file_path) - >>> # Make input connections - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_file_path = str() - >>> op.inputs.file_path.connect(my_file_path) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.mesh.stl_export( + ... mesh=my_mesh, + ... file_path=my_file_path, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.mesh.stl_export(mesh=my_mesh,file_path=my_file_path) + >>> # Get output data + >>> result_data_sources = op.outputs.data_sources() + """ - >>> # Get output data - >>> result_data_sources = op.outputs.data_sources()""" def __init__(self, mesh=None, file_path=None, config=None, server=None): - super().__init__(name="stl_export", config = config, server = server) + super().__init__(name="stl_export", config=config, server=server) self._inputs = InputsStlExport(self) self._outputs = OutputsStlExport(self) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) - if file_path !=None: + if file_path is not None: self.inputs.file_path.connect(file_path) @staticmethod def _spec(): - spec = Specification(description="""export a mesh into a stl file.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=False, document=""""""), - 1 : PinSpecification(name = "file_path", type_names=["string"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""""")}) + description = """export a mesh into a stl file.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="file_path", + type_names=["string"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "stl_export") + def default_config(server=None): + """Returns the default config of the operator. 
+ + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="stl_export", server=server) @property def inputs(self): @@ -68,115 +103,109 @@ def inputs(self): Returns -------- - inputs : InputsStlExport + inputs : InputsStlExport """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsStlExport + outputs : OutputsStlExport """ return super().outputs -#internal name: stl_export -#scripting name: stl_export class InputsStlExport(_Inputs): - """Intermediate class used to connect user inputs to stl_export operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mesh.stl_export() - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_file_path = str() - >>> op.inputs.file_path.connect(my_file_path) + """Intermediate class used to connect user inputs to + stl_export operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.stl_export() + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_file_path = str() + >>> op.inputs.file_path.connect(my_file_path) """ + def __init__(self, op: Operator): super().__init__(stl_export._spec().inputs, op) - self._mesh = Input(stl_export._spec().input_pin(0), 0, op, -1) + self._mesh = Input(stl_export._spec().input_pin(0), 0, op, -1) self._inputs.append(self._mesh) - self._file_path = Input(stl_export._spec().input_pin(1), 1, op, -1) + self._file_path = Input(stl_export._spec().input_pin(1), 1, op, -1) self._inputs.append(self._file_path) @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.stl_export() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def file_path(self): - """Allows to connect file_path input to the operator + """Allows to connect file_path input to the operator. Parameters ---------- - my_file_path : str, + my_file_path : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.stl_export() >>> op.inputs.file_path.connect(my_file_path) - >>> #or + >>> # or >>> op.inputs.file_path(my_file_path) - """ return self._file_path + class OutputsStlExport(_Outputs): - """Intermediate class used to get outputs from stl_export operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mesh.stl_export() - >>> # Connect inputs : op.inputs. ... - >>> result_data_sources = op.outputs.data_sources() + """Intermediate class used to get outputs from + stl_export operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.stl_export() + >>> # Connect inputs : op.inputs. ... + >>> result_data_sources = op.outputs.data_sources() """ + def __init__(self, op: Operator): super().__init__(stl_export._spec().outputs, op) - self._data_sources = Output(stl_export._spec().output_pin(0), 0, op) + self._data_sources = Output(stl_export._spec().output_pin(0), 0, op) self._outputs.append(self._data_sources) @property def data_sources(self): """Allows to get data_sources output of the operator - Returns ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.stl_export() >>> # Connect inputs : op.inputs. ... - >>> result_data_sources = op.outputs.data_sources() - """ + >>> result_data_sources = op.outputs.data_sources() + """ # noqa: E501 return self._data_sources - diff --git a/ansys/dpf/core/operators/mesh/tri_mesh_skin.py b/ansys/dpf/core/operators/mesh/tri_mesh_skin.py index be4303958fb..2d7f26eb2d5 100644 --- a/ansys/dpf/core/operators/mesh/tri_mesh_skin.py +++ b/ansys/dpf/core/operators/mesh/tri_mesh_skin.py @@ -1,63 +1,121 @@ """ tri_mesh_skin -============= +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from meshOperatorsCore plugin, from "mesh" category -""" class tri_mesh_skin(Operator): - """Extracts a skin of the mesh in triangles (2D elements) in a new meshed region - - available inputs: - - mesh (MeshedRegion) - - available outputs: - - mesh (MeshedRegion) - - nodes_mesh_scoping (Scoping) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.mesh.tri_mesh_skin() - - >>> # Make input connections - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.mesh.tri_mesh_skin(mesh=my_mesh) + """Extracts a skin of the mesh in triangles (2D elements) in a new meshed + region + + Parameters + ---------- + mesh : MeshedRegion + include_surfaces : bool, optional + True: meshing will also take into account + shell and 2d elements, false: meshing + will ignore shell and 2d elements. + default is false + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.mesh.tri_mesh_skin() + + >>> # Make input connections + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_include_surfaces = bool() + >>> op.inputs.include_surfaces.connect(my_include_surfaces) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.mesh.tri_mesh_skin( + ... mesh=my_mesh, + ... include_surfaces=my_include_surfaces, + ... 
) + + >>> # Get output data + >>> result_mesh = op.outputs.mesh() + >>> result_nodes_mesh_scoping = op.outputs.nodes_mesh_scoping() + """ - >>> # Get output data - >>> result_mesh = op.outputs.mesh() - >>> result_nodes_mesh_scoping = op.outputs.nodes_mesh_scoping()""" - def __init__(self, mesh=None, config=None, server=None): - super().__init__(name="meshed_skin_sector_triangle", config = config, server = server) + def __init__(self, mesh=None, include_surfaces=None, config=None, server=None): + super().__init__( + name="meshed_skin_sector_triangle", config=config, server=server + ) self._inputs = InputsTriMeshSkin(self) self._outputs = OutputsTriMeshSkin(self) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) + if include_surfaces is not None: + self.inputs.include_surfaces.connect(include_surfaces) @staticmethod def _spec(): - spec = Specification(description="""Extracts a skin of the mesh in triangles (2D elements) in a new meshed region""", - map_input_pin_spec={ - 0 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=False, document=""""""), - 1 : PinSpecification(name = "nodes_mesh_scoping", type_names=["scoping"], optional=False, document="""""")}) + description = """Extracts a skin of the mesh in triangles (2D elements) in a new meshed + region""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="include_surfaces", + type_names=["bool"], + optional=True, + document="""True: meshing will also take into account + shell and 2d elements, false: meshing + will ignore shell and 2d elements. 
+ default is false""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="nodes_mesh_scoping", + type_names=["scoping"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "meshed_skin_sector_triangle") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config( + name="meshed_skin_sector_triangle", server=server + ) @property def inputs(self): @@ -65,113 +123,134 @@ def inputs(self): Returns -------- - inputs : InputsTriMeshSkin + inputs : InputsTriMeshSkin """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsTriMeshSkin + outputs : OutputsTriMeshSkin """ return super().outputs -#internal name: meshed_skin_sector_triangle -#scripting name: tri_mesh_skin class InputsTriMeshSkin(_Inputs): - """Intermediate class used to connect user inputs to tri_mesh_skin operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mesh.tri_mesh_skin() - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + tri_mesh_skin operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.tri_mesh_skin() + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_include_surfaces = bool() + >>> op.inputs.include_surfaces.connect(my_include_surfaces) """ + def __init__(self, op: Operator): super().__init__(tri_mesh_skin._spec().inputs, op) - self._mesh = Input(tri_mesh_skin._spec().input_pin(0), 0, op, -1) + self._mesh = Input(tri_mesh_skin._spec().input_pin(0), 0, op, -1) self._inputs.append(self._mesh) + self._include_surfaces = Input(tri_mesh_skin._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._include_surfaces) @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.tri_mesh_skin() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + @property + def include_surfaces(self): + """Allows to connect include_surfaces input to the operator. + + True: meshing will also take into account + shell and 2d elements, false: meshing + will ignore shell and 2d elements. + default is false + + Parameters + ---------- + my_include_surfaces : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.tri_mesh_skin() + >>> op.inputs.include_surfaces.connect(my_include_surfaces) + >>> # or + >>> op.inputs.include_surfaces(my_include_surfaces) + """ + return self._include_surfaces + + class OutputsTriMeshSkin(_Outputs): - """Intermediate class used to get outputs from tri_mesh_skin operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.mesh.tri_mesh_skin() - >>> # Connect inputs : op.inputs. ... 
- >>> result_mesh = op.outputs.mesh() - >>> result_nodes_mesh_scoping = op.outputs.nodes_mesh_scoping() + """Intermediate class used to get outputs from + tri_mesh_skin operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.tri_mesh_skin() + >>> # Connect inputs : op.inputs. ... + >>> result_mesh = op.outputs.mesh() + >>> result_nodes_mesh_scoping = op.outputs.nodes_mesh_scoping() """ + def __init__(self, op: Operator): super().__init__(tri_mesh_skin._spec().outputs, op) - self._mesh = Output(tri_mesh_skin._spec().output_pin(0), 0, op) + self._mesh = Output(tri_mesh_skin._spec().output_pin(0), 0, op) self._outputs.append(self._mesh) - self._nodes_mesh_scoping = Output(tri_mesh_skin._spec().output_pin(1), 1, op) + self._nodes_mesh_scoping = Output(tri_mesh_skin._spec().output_pin(1), 1, op) self._outputs.append(self._nodes_mesh_scoping) @property def mesh(self): """Allows to get mesh output of the operator - Returns ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.tri_mesh_skin() >>> # Connect inputs : op.inputs. ... - >>> result_mesh = op.outputs.mesh() - """ + >>> result_mesh = op.outputs.mesh() + """ # noqa: E501 return self._mesh @property def nodes_mesh_scoping(self): """Allows to get nodes_mesh_scoping output of the operator - Returns ---------- - my_nodes_mesh_scoping : Scoping, + my_nodes_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.mesh.tri_mesh_skin() >>> # Connect inputs : op.inputs. ... 
- >>> result_nodes_mesh_scoping = op.outputs.nodes_mesh_scoping() - """ + >>> result_nodes_mesh_scoping = op.outputs.nodes_mesh_scoping() + """ # noqa: E501 return self._nodes_mesh_scoping - diff --git a/ansys/dpf/core/operators/metadata/boundary_condition_provider.py b/ansys/dpf/core/operators/metadata/boundary_condition_provider.py index fe6e897245a..0fcf9bb8f71 100644 --- a/ansys/dpf/core/operators/metadata/boundary_condition_provider.py +++ b/ansys/dpf/core/operators/metadata/boundary_condition_provider.py @@ -1,66 +1,106 @@ """ boundary_condition_provider -=========================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.outputs import _modify_output_spec_with_one_type from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "metadata" category -""" class boundary_condition_provider(Operator): - """Read boundary conditions from the results files contained in the streams or data sources. + """Read boundary conditions from the results files contained in the + streams or data sources. 
+ + Parameters + ---------- + streams_container : StreamsContainer, optional + data_sources : DataSources - available inputs: - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - available outputs: - - results_info (Field ,FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.metadata.boundary_condition_provider() - >>> # Instantiate operator - >>> op = dpf.operators.metadata.boundary_condition_provider() + >>> # Make input connections + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) - >>> # Make input connections - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.metadata.boundary_condition_provider( + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.metadata.boundary_condition_provider(streams_container=my_streams_container,data_sources=my_data_sources) + >>> # Get output data + >>> result_results_info = op.outputs.results_info() + """ - >>> # Get output data - >>> result_results_info = op.outputs.results_info()""" - def __init__(self, streams_container=None, data_sources=None, config=None, server=None): - super().__init__(name="boundary_conditions", config = config, server = server) + def __init__( + self, streams_container=None, data_sources=None, config=None, server=None + ): + super().__init__(name="boundary_conditions", config=config, server=server) self._inputs = InputsBoundaryConditionProvider(self) self._outputs = OutputsBoundaryConditionProvider(self) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) @staticmethod def _spec(): - spec = Specification(description="""Read boundary conditions from the results files contained in the streams or data sources.""", - map_input_pin_spec={ - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document=""""""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "results_info", type_names=["field","fields_container"], optional=False, document="""results info""")}) + description = """Read boundary conditions from the results files contained in the + streams or data sources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""""", + ), + 4: PinSpecification( + name="data_sources", + 
type_names=["data_sources"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="results_info", + type_names=["field", "fields_container"], + optional=False, + document="""Results info""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "boundary_conditions") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="boundary_conditions", server=server) @property def inputs(self): @@ -68,98 +108,110 @@ def inputs(self): Returns -------- - inputs : InputsBoundaryConditionProvider + inputs : InputsBoundaryConditionProvider """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsBoundaryConditionProvider + outputs : OutputsBoundaryConditionProvider """ return super().outputs -#internal name: boundary_conditions -#scripting name: boundary_condition_provider class InputsBoundaryConditionProvider(_Inputs): - """Intermediate class used to connect user inputs to boundary_condition_provider operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.metadata.boundary_condition_provider() - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + """Intermediate class used to connect user inputs to + boundary_condition_provider operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.boundary_condition_provider() + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) """ + def __init__(self, op: Operator): super().__init__(boundary_condition_provider._spec().inputs, op) - self._streams_container = Input(boundary_condition_provider._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + boundary_condition_provider._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(boundary_condition_provider._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + boundary_condition_provider._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.boundary_condition_provider() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.boundary_condition_provider() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources + class OutputsBoundaryConditionProvider(_Outputs): - """Intermediate class used to get outputs from boundary_condition_provider operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.metadata.boundary_condition_provider() - >>> # Connect inputs : op.inputs. ... - >>> result_results_info = op.outputs.results_info() + """Intermediate class used to get outputs from + boundary_condition_provider operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.boundary_condition_provider() + >>> # Connect inputs : op.inputs. ... + >>> result_results_info = op.outputs.results_info() """ + def __init__(self, op: Operator): super().__init__(boundary_condition_provider._spec().outputs, op) - self.results_info_as_field = Output( _modify_output_spec_with_one_type(boundary_condition_provider._spec().output_pin(0), "field"), 0, op) + self.results_info_as_field = Output( + _modify_output_spec_with_one_type( + boundary_condition_provider._spec().output_pin(0), "field" + ), + 0, + op, + ) self._outputs.append(self.results_info_as_field) - self.results_info_as_fields_container = Output( _modify_output_spec_with_one_type(boundary_condition_provider._spec().output_pin(0), "fields_container"), 0, op) + self.results_info_as_fields_container = Output( + _modify_output_spec_with_one_type( + boundary_condition_provider._spec().output_pin(0), "fields_container" + ), + 0, + op, + ) self._outputs.append(self.results_info_as_fields_container) - diff --git a/ansys/dpf/core/operators/metadata/cyclic_mesh_expansion.py 
b/ansys/dpf/core/operators/metadata/cyclic_mesh_expansion.py index 858c380045c..79cdb458c81 100644 --- a/ansys/dpf/core/operators/metadata/cyclic_mesh_expansion.py +++ b/ansys/dpf/core/operators/metadata/cyclic_mesh_expansion.py @@ -1,75 +1,134 @@ """ cyclic_mesh_expansion -===================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "metadata" category -""" class cyclic_mesh_expansion(Operator): """Expand the mesh. - available inputs: - - sector_meshed_region (MeshedRegion, MeshesContainer) (optional) - - cyclic_support (CyclicSupport) - - sectors_to_expand (list, Scoping, ScopingsContainer) (optional) - - available outputs: - - meshed_region (MeshedRegion) - - cyclic_support (CyclicSupport) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.metadata.cyclic_mesh_expansion() - - >>> # Make input connections - >>> my_sector_meshed_region = dpf.MeshedRegion() - >>> op.inputs.sector_meshed_region.connect(my_sector_meshed_region) - >>> my_cyclic_support = dpf.CyclicSupport() - >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> my_sectors_to_expand = dpf.list() - >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.metadata.cyclic_mesh_expansion(sector_meshed_region=my_sector_meshed_region,cyclic_support=my_cyclic_support,sectors_to_expand=my_sectors_to_expand) - - >>> # Get output data - >>> result_meshed_region = op.outputs.meshed_region() - >>> result_cyclic_support = 
op.outputs.cyclic_support()""" - def __init__(self, sector_meshed_region=None, cyclic_support=None, sectors_to_expand=None, config=None, server=None): - super().__init__(name="cyclic_expansion_mesh", config = config, server = server) + Parameters + ---------- + sector_meshed_region : MeshedRegion or MeshesContainer, optional + cyclic_support : CyclicSupport + sectors_to_expand : Scoping or ScopingsContainer, optional + Sectors to expand (start at 0), for + multistage: use scopings container + with 'stage' label. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.metadata.cyclic_mesh_expansion() + + >>> # Make input connections + >>> my_sector_meshed_region = dpf.MeshedRegion() + >>> op.inputs.sector_meshed_region.connect(my_sector_meshed_region) + >>> my_cyclic_support = dpf.CyclicSupport() + >>> op.inputs.cyclic_support.connect(my_cyclic_support) + >>> my_sectors_to_expand = dpf.Scoping() + >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.metadata.cyclic_mesh_expansion( + ... sector_meshed_region=my_sector_meshed_region, + ... cyclic_support=my_cyclic_support, + ... sectors_to_expand=my_sectors_to_expand, + ... 
) + + >>> # Get output data + >>> result_meshed_region = op.outputs.meshed_region() + >>> result_cyclic_support = op.outputs.cyclic_support() + """ + + def __init__( + self, + sector_meshed_region=None, + cyclic_support=None, + sectors_to_expand=None, + config=None, + server=None, + ): + super().__init__(name="cyclic_expansion_mesh", config=config, server=server) self._inputs = InputsCyclicMeshExpansion(self) self._outputs = OutputsCyclicMeshExpansion(self) - if sector_meshed_region !=None: + if sector_meshed_region is not None: self.inputs.sector_meshed_region.connect(sector_meshed_region) - if cyclic_support !=None: + if cyclic_support is not None: self.inputs.cyclic_support.connect(cyclic_support) - if sectors_to_expand !=None: + if sectors_to_expand is not None: self.inputs.sectors_to_expand.connect(sectors_to_expand) @staticmethod def _spec(): - spec = Specification(description="""Expand the mesh.""", - map_input_pin_spec={ - 7 : PinSpecification(name = "sector_meshed_region", type_names=["abstract_meshed_region","meshes_container"], optional=True, document=""""""), - 16 : PinSpecification(name = "cyclic_support", type_names=["cyclic_support"], optional=False, document=""""""), - 18 : PinSpecification(name = "sectors_to_expand", type_names=["vector","scoping","scopings_container"], optional=True, document="""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "meshed_region", type_names=["abstract_meshed_region"], optional=False, document="""expanded meshed region."""), - 1 : PinSpecification(name = "cyclic_support", type_names=["cyclic_support"], optional=False, document="""input cyclic support modified in place containing the new expanded meshed regions.""")}) + description = """Expand the mesh.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 7: PinSpecification( + name="sector_meshed_region", + 
type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""""", + ), + 16: PinSpecification( + name="cyclic_support", + type_names=["cyclic_support"], + optional=False, + document="""""", + ), + 18: PinSpecification( + name="sectors_to_expand", + type_names=["vector", "scoping", "scopings_container"], + optional=True, + document="""Sectors to expand (start at 0), for + multistage: use scopings container + with 'stage' label.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="meshed_region", + type_names=["abstract_meshed_region"], + optional=False, + document="""Expanded meshed region.""", + ), + 1: PinSpecification( + name="cyclic_support", + type_names=["cyclic_support"], + optional=False, + document="""Input cyclic support modified in place + containing the new expanded meshed + regions.""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "cyclic_expansion_mesh") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="cyclic_expansion_mesh", server=server) @property def inputs(self): @@ -77,167 +136,163 @@ def inputs(self): Returns -------- - inputs : InputsCyclicMeshExpansion + inputs : InputsCyclicMeshExpansion """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCyclicMeshExpansion + outputs : OutputsCyclicMeshExpansion """ return super().outputs -#internal name: cyclic_expansion_mesh -#scripting name: cyclic_mesh_expansion class InputsCyclicMeshExpansion(_Inputs): - """Intermediate class used to connect user inputs to cyclic_mesh_expansion operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.metadata.cyclic_mesh_expansion() - >>> my_sector_meshed_region = dpf.MeshedRegion() - >>> op.inputs.sector_meshed_region.connect(my_sector_meshed_region) - >>> my_cyclic_support = dpf.CyclicSupport() - >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> my_sectors_to_expand = dpf.list() - >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) + """Intermediate class used to connect user inputs to + cyclic_mesh_expansion operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.cyclic_mesh_expansion() + >>> my_sector_meshed_region = dpf.MeshedRegion() + >>> op.inputs.sector_meshed_region.connect(my_sector_meshed_region) + >>> my_cyclic_support = dpf.CyclicSupport() + >>> op.inputs.cyclic_support.connect(my_cyclic_support) + >>> my_sectors_to_expand = dpf.Scoping() + >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) """ + def __init__(self, op: Operator): super().__init__(cyclic_mesh_expansion._spec().inputs, op) - self._sector_meshed_region = Input(cyclic_mesh_expansion._spec().input_pin(7), 7, op, -1) + self._sector_meshed_region = Input( + cyclic_mesh_expansion._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._sector_meshed_region) - self._cyclic_support = Input(cyclic_mesh_expansion._spec().input_pin(16), 16, op, -1) + self._cyclic_support = Input( + cyclic_mesh_expansion._spec().input_pin(16), 16, op, -1 + ) self._inputs.append(self._cyclic_support) - self._sectors_to_expand = Input(cyclic_mesh_expansion._spec().input_pin(18), 18, op, -1) + self._sectors_to_expand = Input( + cyclic_mesh_expansion._spec().input_pin(18), 18, op, -1 + ) self._inputs.append(self._sectors_to_expand) @property def sector_meshed_region(self): - """Allows to connect sector_meshed_region input to the operator + """Allows to connect sector_meshed_region input to the operator. 
Parameters ---------- - my_sector_meshed_region : MeshedRegion, MeshesContainer, + my_sector_meshed_region : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.cyclic_mesh_expansion() >>> op.inputs.sector_meshed_region.connect(my_sector_meshed_region) - >>> #or + >>> # or >>> op.inputs.sector_meshed_region(my_sector_meshed_region) - """ return self._sector_meshed_region @property def cyclic_support(self): - """Allows to connect cyclic_support input to the operator + """Allows to connect cyclic_support input to the operator. Parameters ---------- - my_cyclic_support : CyclicSupport, + my_cyclic_support : CyclicSupport Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.cyclic_mesh_expansion() >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> #or + >>> # or >>> op.inputs.cyclic_support(my_cyclic_support) - """ return self._cyclic_support @property def sectors_to_expand(self): - """Allows to connect sectors_to_expand input to the operator + """Allows to connect sectors_to_expand input to the operator. - - pindoc: sectors to expand (start at 0), for multistage: use scopings container with 'stage' label. + Sectors to expand (start at 0), for + multistage: use scopings container + with 'stage' label. 
Parameters ---------- - my_sectors_to_expand : list, Scoping, ScopingsContainer, + my_sectors_to_expand : Scoping or ScopingsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.cyclic_mesh_expansion() >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) - >>> #or + >>> # or >>> op.inputs.sectors_to_expand(my_sectors_to_expand) - """ return self._sectors_to_expand + class OutputsCyclicMeshExpansion(_Outputs): - """Intermediate class used to get outputs from cyclic_mesh_expansion operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.metadata.cyclic_mesh_expansion() - >>> # Connect inputs : op.inputs. ... - >>> result_meshed_region = op.outputs.meshed_region() - >>> result_cyclic_support = op.outputs.cyclic_support() + """Intermediate class used to get outputs from + cyclic_mesh_expansion operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.cyclic_mesh_expansion() + >>> # Connect inputs : op.inputs. ... + >>> result_meshed_region = op.outputs.meshed_region() + >>> result_cyclic_support = op.outputs.cyclic_support() """ + def __init__(self, op: Operator): super().__init__(cyclic_mesh_expansion._spec().outputs, op) - self._meshed_region = Output(cyclic_mesh_expansion._spec().output_pin(0), 0, op) + self._meshed_region = Output(cyclic_mesh_expansion._spec().output_pin(0), 0, op) self._outputs.append(self._meshed_region) - self._cyclic_support = Output(cyclic_mesh_expansion._spec().output_pin(1), 1, op) + self._cyclic_support = Output( + cyclic_mesh_expansion._spec().output_pin(1), 1, op + ) self._outputs.append(self._cyclic_support) @property def meshed_region(self): """Allows to get meshed_region output of the operator - - - pindoc: expanded meshed region. 
- Returns ---------- - my_meshed_region : MeshedRegion, + my_meshed_region : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.cyclic_mesh_expansion() >>> # Connect inputs : op.inputs. ... - >>> result_meshed_region = op.outputs.meshed_region() - """ + >>> result_meshed_region = op.outputs.meshed_region() + """ # noqa: E501 return self._meshed_region @property def cyclic_support(self): """Allows to get cyclic_support output of the operator - - - pindoc: input cyclic support modified in place containing the new expanded meshed regions. - Returns ---------- - my_cyclic_support : CyclicSupport, + my_cyclic_support : CyclicSupport Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.cyclic_mesh_expansion() >>> # Connect inputs : op.inputs. ... - >>> result_cyclic_support = op.outputs.cyclic_support() - """ + >>> result_cyclic_support = op.outputs.cyclic_support() + """ # noqa: E501 return self._cyclic_support - diff --git a/ansys/dpf/core/operators/metadata/cyclic_support_provider.py b/ansys/dpf/core/operators/metadata/cyclic_support_provider.py index c6c375298a9..3b0a112550e 100644 --- a/ansys/dpf/core/operators/metadata/cyclic_support_provider.py +++ b/ansys/dpf/core/operators/metadata/cyclic_support_provider.py @@ -1,87 +1,170 @@ """ cyclic_support_provider -======================= +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "metadata" category -""" class cyclic_support_provider(Operator): - """Read the cyclic support (DPF entity containing necessary informations for expansions) and expands the mesh. - - available inputs: - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - sector_meshed_region (MeshedRegion, MeshesContainer) (optional) - - expanded_meshed_region (MeshedRegion, MeshesContainer) (optional) - - sectors_to_expand (Scoping, ScopingsContainer, list) (optional) - - available outputs: - - cyclic_support (CyclicSupport) - - sector_meshes (MeshesContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.metadata.cyclic_support_provider() - - >>> # Make input connections - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_sector_meshed_region = dpf.MeshedRegion() - >>> op.inputs.sector_meshed_region.connect(my_sector_meshed_region) - >>> my_expanded_meshed_region = dpf.MeshedRegion() - >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> my_sectors_to_expand = dpf.Scoping() - >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) - - >>> # Instantiate operator and connect inputs in one line - >>> op = 
dpf.operators.metadata.cyclic_support_provider(streams_container=my_streams_container,data_sources=my_data_sources,sector_meshed_region=my_sector_meshed_region,expanded_meshed_region=my_expanded_meshed_region,sectors_to_expand=my_sectors_to_expand) - - >>> # Get output data - >>> result_cyclic_support = op.outputs.cyclic_support() - >>> result_sector_meshes = op.outputs.sector_meshes()""" - def __init__(self, streams_container=None, data_sources=None, sector_meshed_region=None, expanded_meshed_region=None, sectors_to_expand=None, config=None, server=None): - super().__init__(name="mapdl::rst::support_provider_cyclic", config = config, server = server) + """Read the cyclic support (DPF entity containing necessary information + for expansions) and expands the mesh. + + Parameters + ---------- + streams_container : StreamsContainer, optional + Streams containing the result file. + data_sources : DataSources + Data sources containing the result file. + sector_meshed_region : MeshedRegion or MeshesContainer, optional + Mesh of the first sector. + expanded_meshed_region : MeshedRegion or MeshesContainer, optional + If this pin is set, expanding the mesh is not + necessary. + sectors_to_expand : Scoping or ScopingsContainer, optional + Sectors to expand (start at 0), for + multistage: use scopings container + with 'stage' label. 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.metadata.cyclic_support_provider() + + >>> # Make input connections + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_sector_meshed_region = dpf.MeshedRegion() + >>> op.inputs.sector_meshed_region.connect(my_sector_meshed_region) + >>> my_expanded_meshed_region = dpf.MeshedRegion() + >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) + >>> my_sectors_to_expand = dpf.Scoping() + >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.metadata.cyclic_support_provider( + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... sector_meshed_region=my_sector_meshed_region, + ... expanded_meshed_region=my_expanded_meshed_region, + ... sectors_to_expand=my_sectors_to_expand, + ... 
) + + >>> # Get output data + >>> result_cyclic_support = op.outputs.cyclic_support() + >>> result_sector_meshes = op.outputs.sector_meshes() + """ + + def __init__( + self, + streams_container=None, + data_sources=None, + sector_meshed_region=None, + expanded_meshed_region=None, + sectors_to_expand=None, + config=None, + server=None, + ): + super().__init__( + name="mapdl::rst::support_provider_cyclic", config=config, server=server + ) self._inputs = InputsCyclicSupportProvider(self) self._outputs = OutputsCyclicSupportProvider(self) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if sector_meshed_region !=None: + if sector_meshed_region is not None: self.inputs.sector_meshed_region.connect(sector_meshed_region) - if expanded_meshed_region !=None: + if expanded_meshed_region is not None: self.inputs.expanded_meshed_region.connect(expanded_meshed_region) - if sectors_to_expand !=None: + if sectors_to_expand is not None: self.inputs.sectors_to_expand.connect(sectors_to_expand) @staticmethod def _spec(): - spec = Specification(description="""Read the cyclic support (DPF entity containing necessary informations for expansions) and expands the mesh.""", - map_input_pin_spec={ - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""Streams containing the result file."""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""data sources containing the result file."""), - 7 : PinSpecification(name = "sector_meshed_region", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""mesh of the first sector."""), - 15 : PinSpecification(name = "expanded_meshed_region", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""if this pin 
is set, expanding the mesh is not necessary."""), - 18 : PinSpecification(name = "sectors_to_expand", type_names=["scoping","scopings_container","vector"], optional=True, document="""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "cyclic_support", type_names=["cyclic_support"], optional=False, document=""""""), - 1 : PinSpecification(name = "sector_meshes", type_names=["meshes_container"], optional=False, document="""""")}) + description = """Read the cyclic support (DPF entity containing necessary information + for expansions) and expands the mesh.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Streams containing the result file.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Data sources containing the result file.""", + ), + 7: PinSpecification( + name="sector_meshed_region", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Mesh of the first sector.""", + ), + 15: PinSpecification( + name="expanded_meshed_region", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""If this pin is set, expanding the mesh is not + necessary.""", + ), + 18: PinSpecification( + name="sectors_to_expand", + type_names=["scoping", "scopings_container", "vector"], + optional=True, + document="""Sectors to expand (start at 0), for + multistage: use scopings container + with 'stage' label.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="cyclic_support", + type_names=["cyclic_support"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="sector_meshes", + type_names=["meshes_container"], + optional=False, + document="""""", + ), + }, + ) return spec 
- @staticmethod - def default_config(): - return Operator.default_config(name = "mapdl::rst::support_provider_cyclic") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config( + name="mapdl::rst::support_provider_cyclic", server=server + ) @property def inputs(self): @@ -89,219 +172,222 @@ def inputs(self): Returns -------- - inputs : InputsCyclicSupportProvider + inputs : InputsCyclicSupportProvider """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCyclicSupportProvider + outputs : OutputsCyclicSupportProvider """ return super().outputs -#internal name: mapdl::rst::support_provider_cyclic -#scripting name: cyclic_support_provider class InputsCyclicSupportProvider(_Inputs): - """Intermediate class used to connect user inputs to cyclic_support_provider operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.metadata.cyclic_support_provider() - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_sector_meshed_region = dpf.MeshedRegion() - >>> op.inputs.sector_meshed_region.connect(my_sector_meshed_region) - >>> my_expanded_meshed_region = dpf.MeshedRegion() - >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> my_sectors_to_expand = dpf.Scoping() - >>> 
op.inputs.sectors_to_expand.connect(my_sectors_to_expand) + """Intermediate class used to connect user inputs to + cyclic_support_provider operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.cyclic_support_provider() + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_sector_meshed_region = dpf.MeshedRegion() + >>> op.inputs.sector_meshed_region.connect(my_sector_meshed_region) + >>> my_expanded_meshed_region = dpf.MeshedRegion() + >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) + >>> my_sectors_to_expand = dpf.Scoping() + >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) """ + def __init__(self, op: Operator): super().__init__(cyclic_support_provider._spec().inputs, op) - self._streams_container = Input(cyclic_support_provider._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + cyclic_support_provider._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(cyclic_support_provider._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + cyclic_support_provider._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._sector_meshed_region = Input(cyclic_support_provider._spec().input_pin(7), 7, op, -1) + self._sector_meshed_region = Input( + cyclic_support_provider._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._sector_meshed_region) - self._expanded_meshed_region = Input(cyclic_support_provider._spec().input_pin(15), 15, op, -1) + self._expanded_meshed_region = Input( + cyclic_support_provider._spec().input_pin(15), 15, op, -1 + ) self._inputs.append(self._expanded_meshed_region) - self._sectors_to_expand = Input(cyclic_support_provider._spec().input_pin(18), 18, op, -1) + 
self._sectors_to_expand = Input( + cyclic_support_provider._spec().input_pin(18), 18, op, -1 + ) self._inputs.append(self._sectors_to_expand) @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: Streams containing the result file. + Streams containing the result file. Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.cyclic_support_provider() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: data sources containing the result file. + Data sources containing the result file. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.cyclic_support_provider() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def sector_meshed_region(self): - """Allows to connect sector_meshed_region input to the operator + """Allows to connect sector_meshed_region input to the operator. - - pindoc: mesh of the first sector. + Mesh of the first sector. 
Parameters ---------- - my_sector_meshed_region : MeshedRegion, MeshesContainer, + my_sector_meshed_region : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.cyclic_support_provider() >>> op.inputs.sector_meshed_region.connect(my_sector_meshed_region) - >>> #or + >>> # or >>> op.inputs.sector_meshed_region(my_sector_meshed_region) - """ return self._sector_meshed_region @property def expanded_meshed_region(self): - """Allows to connect expanded_meshed_region input to the operator + """Allows to connect expanded_meshed_region input to the operator. - - pindoc: if this pin is set, expanding the mesh is not necessary. + If this pin is set, expanding the mesh is not + necessary. Parameters ---------- - my_expanded_meshed_region : MeshedRegion, MeshesContainer, + my_expanded_meshed_region : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.cyclic_support_provider() >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> #or + >>> # or >>> op.inputs.expanded_meshed_region(my_expanded_meshed_region) - """ return self._expanded_meshed_region @property def sectors_to_expand(self): - """Allows to connect sectors_to_expand input to the operator + """Allows to connect sectors_to_expand input to the operator. - - pindoc: sectors to expand (start at 0), for multistage: use scopings container with 'stage' label. + Sectors to expand (start at 0), for + multistage: use scopings container + with 'stage' label. 
Parameters ---------- - my_sectors_to_expand : Scoping, ScopingsContainer, list, + my_sectors_to_expand : Scoping or ScopingsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.cyclic_support_provider() >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) - >>> #or + >>> # or >>> op.inputs.sectors_to_expand(my_sectors_to_expand) - """ return self._sectors_to_expand + class OutputsCyclicSupportProvider(_Outputs): - """Intermediate class used to get outputs from cyclic_support_provider operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.metadata.cyclic_support_provider() - >>> # Connect inputs : op.inputs. ... - >>> result_cyclic_support = op.outputs.cyclic_support() - >>> result_sector_meshes = op.outputs.sector_meshes() + """Intermediate class used to get outputs from + cyclic_support_provider operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.cyclic_support_provider() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_cyclic_support = op.outputs.cyclic_support() + >>> result_sector_meshes = op.outputs.sector_meshes() """ + def __init__(self, op: Operator): super().__init__(cyclic_support_provider._spec().outputs, op) - self._cyclic_support = Output(cyclic_support_provider._spec().output_pin(0), 0, op) + self._cyclic_support = Output( + cyclic_support_provider._spec().output_pin(0), 0, op + ) self._outputs.append(self._cyclic_support) - self._sector_meshes = Output(cyclic_support_provider._spec().output_pin(1), 1, op) + self._sector_meshes = Output( + cyclic_support_provider._spec().output_pin(1), 1, op + ) self._outputs.append(self._sector_meshes) @property def cyclic_support(self): """Allows to get cyclic_support output of the operator - Returns ---------- - my_cyclic_support : CyclicSupport, + my_cyclic_support : CyclicSupport Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.cyclic_support_provider() >>> # Connect inputs : op.inputs. ... - >>> result_cyclic_support = op.outputs.cyclic_support() - """ + >>> result_cyclic_support = op.outputs.cyclic_support() + """ # noqa: E501 return self._cyclic_support @property def sector_meshes(self): """Allows to get sector_meshes output of the operator - Returns ---------- - my_sector_meshes : MeshesContainer, + my_sector_meshes : MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.cyclic_support_provider() >>> # Connect inputs : op.inputs. ... 
- >>> result_sector_meshes = op.outputs.sector_meshes() - """ + >>> result_sector_meshes = op.outputs.sector_meshes() + """ # noqa: E501 return self._sector_meshes - diff --git a/ansys/dpf/core/operators/metadata/is_cyclic.py b/ansys/dpf/core/operators/metadata/is_cyclic.py index 5ff0faa54b5..70ff72e2322 100644 --- a/ansys/dpf/core/operators/metadata/is_cyclic.py +++ b/ansys/dpf/core/operators/metadata/is_cyclic.py @@ -1,66 +1,108 @@ """ is_cyclic -========= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "metadata" category -""" class is_cyclic(Operator): """Read if the model is cyclic form the result file. 
- available inputs: - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) + Parameters + ---------- + streams_container : StreamsContainer, optional + Streams (result file container) (optional) + data_sources : DataSources + If the stream is null then we need to get the + file path from the data sources + - available outputs: - - file_path (str) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.metadata.is_cyclic() - >>> # Instantiate operator - >>> op = dpf.operators.metadata.is_cyclic() + >>> # Make input connections + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) - >>> # Make input connections - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.metadata.is_cyclic( + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.metadata.is_cyclic(streams_container=my_streams_container,data_sources=my_data_sources) + >>> # Get output data + >>> result_file_path = op.outputs.file_path() + """ - >>> # Get output data - >>> result_file_path = op.outputs.file_path()""" - def __init__(self, streams_container=None, data_sources=None, config=None, server=None): - super().__init__(name="is_cyclic", config = config, server = server) + def __init__( + self, streams_container=None, data_sources=None, config=None, server=None + ): + super().__init__(name="is_cyclic", config=config, server=server) self._inputs = InputsIsCyclic(self) self._outputs = OutputsIsCyclic(self) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) @staticmethod def _spec(): - spec = Specification(description="""Read if the model is cyclic form the result file.""", - map_input_pin_spec={ - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""streams (result file container) (optional)"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""if the stream is null then we need to get the file path from the data sources""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "file_path", type_names=["string"], optional=False, document="""returns 'single_stage' or 'multi_stage' or an empty string for non cyclic model""")}) + description = """Read if the model is cyclic form the result file.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Streams (result file container) (optional)""", + ), + 4: PinSpecification( + 
name="data_sources", + type_names=["data_sources"], + optional=False, + document="""If the stream is null then we need to get the + file path from the data sources""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="file_path", + type_names=["string"], + optional=False, + document="""Returns 'single_stage' or 'multi_stage' or an + empty string for non cyclic model""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "is_cyclic") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="is_cyclic", server=server) @property def inputs(self): @@ -68,121 +110,114 @@ def inputs(self): Returns -------- - inputs : InputsIsCyclic + inputs : InputsIsCyclic """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsIsCyclic + outputs : OutputsIsCyclic """ return super().outputs -#internal name: is_cyclic -#scripting name: is_cyclic class InputsIsCyclic(_Inputs): - """Intermediate class used to connect user inputs to is_cyclic operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.metadata.is_cyclic() - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + """Intermediate class used to connect user inputs to + is_cyclic operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.is_cyclic() + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) """ + def __init__(self, op: Operator): super().__init__(is_cyclic._spec().inputs, op) - self._streams_container = Input(is_cyclic._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(is_cyclic._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(is_cyclic._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(is_cyclic._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: streams (result file container) (optional) + Streams (result file container) (optional) Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.is_cyclic() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: if the stream is null then we need to get the file path from the data sources + If the stream is null then we need to get the + file path from the data sources Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.is_cyclic() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources + class OutputsIsCyclic(_Outputs): - """Intermediate class used to get outputs from is_cyclic operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.metadata.is_cyclic() - >>> # Connect inputs : op.inputs. ... - >>> result_file_path = op.outputs.file_path() + """Intermediate class used to get outputs from + is_cyclic operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.is_cyclic() + >>> # Connect inputs : op.inputs. ... + >>> result_file_path = op.outputs.file_path() """ + def __init__(self, op: Operator): super().__init__(is_cyclic._spec().outputs, op) - self._file_path = Output(is_cyclic._spec().output_pin(0), 0, op) + self._file_path = Output(is_cyclic._spec().output_pin(0), 0, op) self._outputs.append(self._file_path) @property def file_path(self): """Allows to get file_path output of the operator - - - pindoc: returns 'single_stage' or 'multi_stage' or an empty string for non cyclic model - Returns ---------- - my_file_path : str, + my_file_path : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.is_cyclic() >>> # Connect inputs : op.inputs. ... 
- >>> result_file_path = op.outputs.file_path() - """ + >>> result_file_path = op.outputs.file_path() + """ # noqa: E501 return self._file_path - diff --git a/ansys/dpf/core/operators/metadata/material_provider.py b/ansys/dpf/core/operators/metadata/material_provider.py index 658809b1748..c25909c90a0 100644 --- a/ansys/dpf/core/operators/metadata/material_provider.py +++ b/ansys/dpf/core/operators/metadata/material_provider.py @@ -1,66 +1,109 @@ """ material_provider -================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "metadata" category -""" class material_provider(Operator): - """Read available materials and properties from the results files contained in the streams or data sources. - - available inputs: - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - available outputs: - - materials (Materials) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.metadata.material_provider() - - >>> # Make input connections - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.metadata.material_provider(streams_container=my_streams_container,data_sources=my_data_sources) + """Read available materials and properties from the results files + contained in the streams or data sources. 
+ + Parameters + ---------- + streams_container : StreamsContainer, optional + Streams (result file container) + data_sources : DataSources + If the stream is null then we need to get the + file path from the data sources + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.metadata.material_provider() + + >>> # Make input connections + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.metadata.material_provider( + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... ) + + >>> # Get output data + >>> result_materials = op.outputs.materials() + """ - >>> # Get output data - >>> result_materials = op.outputs.materials()""" - def __init__(self, streams_container=None, data_sources=None, config=None, server=None): - super().__init__(name="MaterialsProvider", config = config, server = server) + def __init__( + self, streams_container=None, data_sources=None, config=None, server=None + ): + super().__init__(name="MaterialsProvider", config=config, server=server) self._inputs = InputsMaterialProvider(self) self._outputs = OutputsMaterialProvider(self) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) @staticmethod def _spec(): - spec = Specification(description="""Read available materials and properties from the results files contained in the streams or data sources.""", - map_input_pin_spec={ - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""streams (result file container)"""), - 4 : 
PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""if the stream is null then we need to get the file path from the data sources""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "materials", type_names=["materials"], optional=False, document="""""")}) + description = """Read available materials and properties from the results files + contained in the streams or data sources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Streams (result file container)""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""If the stream is null then we need to get the + file path from the data sources""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="materials", + type_names=["materials"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "MaterialsProvider") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="MaterialsProvider", server=server) @property def inputs(self): @@ -68,119 +111,116 @@ def inputs(self): Returns -------- - inputs : InputsMaterialProvider + inputs : InputsMaterialProvider """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMaterialProvider + outputs : OutputsMaterialProvider """ return super().outputs -#internal name: MaterialsProvider -#scripting name: material_provider class InputsMaterialProvider(_Inputs): - """Intermediate class used to connect user inputs to material_provider operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.metadata.material_provider() - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + """Intermediate class used to connect user inputs to + material_provider operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.material_provider() + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) """ + def __init__(self, op: Operator): super().__init__(material_provider._spec().inputs, op) - self._streams_container = Input(material_provider._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + material_provider._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(material_provider._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(material_provider._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: streams (result file container) + Streams (result file container) Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.material_provider() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: if the stream is null then we need to get the file path from the data sources + If the stream is null then we need to get the + file path from the data sources Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.material_provider() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources + class OutputsMaterialProvider(_Outputs): - """Intermediate class used to get outputs from material_provider operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.metadata.material_provider() - >>> # Connect inputs : op.inputs. ... - >>> result_materials = op.outputs.materials() + """Intermediate class used to get outputs from + material_provider operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.material_provider() + >>> # Connect inputs : op.inputs. ... + >>> result_materials = op.outputs.materials() """ + def __init__(self, op: Operator): super().__init__(material_provider._spec().outputs, op) - self._materials = Output(material_provider._spec().output_pin(0), 0, op) + self._materials = Output(material_provider._spec().output_pin(0), 0, op) self._outputs.append(self._materials) @property def materials(self): """Allows to get materials output of the operator - Returns ---------- - my_materials : Materials, + my_materials : Materials Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.material_provider() >>> # Connect inputs : op.inputs. ... 
- >>> result_materials = op.outputs.materials() - """ + >>> result_materials = op.outputs.materials() + """ # noqa: E501 return self._materials - diff --git a/ansys/dpf/core/operators/metadata/material_support_provider.py b/ansys/dpf/core/operators/metadata/material_support_provider.py index 2929eabebec..ae4c6cd97ff 100644 --- a/ansys/dpf/core/operators/metadata/material_support_provider.py +++ b/ansys/dpf/core/operators/metadata/material_support_provider.py @@ -1,66 +1,107 @@ """ material_support_provider -========================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "metadata" category -""" class material_support_provider(Operator): """Read the material support. 
- available inputs: - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) + Parameters + ---------- + streams_container : StreamsContainer, optional + Streams (result file container) (optional) + data_sources : DataSources + If the stream is null then we need to get the + file path from the data sources + - available outputs: - - abstract_field_support (AbstractFieldSupport) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.metadata.material_support_provider() - >>> # Instantiate operator - >>> op = dpf.operators.metadata.material_support_provider() + >>> # Make input connections + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) - >>> # Make input connections - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.metadata.material_support_provider( + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.metadata.material_support_provider(streams_container=my_streams_container,data_sources=my_data_sources) + >>> # Get output data + >>> result_abstract_field_support = op.outputs.abstract_field_support() + """ - >>> # Get output data - >>> result_abstract_field_support = op.outputs.abstract_field_support()""" - def __init__(self, streams_container=None, data_sources=None, config=None, server=None): - super().__init__(name="mat_support_provider", config = config, server = server) + def __init__( + self, streams_container=None, data_sources=None, config=None, server=None + ): + super().__init__(name="mat_support_provider", config=config, server=server) self._inputs = InputsMaterialSupportProvider(self) self._outputs = OutputsMaterialSupportProvider(self) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) @staticmethod def _spec(): - spec = Specification(description="""Read the material support.""", - map_input_pin_spec={ - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""streams (result file container) (optional)"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""if the stream is null then we need to get the file path from the data sources""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "abstract_field_support", type_names=["abstract_field_support"], optional=False, document="""""")}) + description = """Read the material support.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Streams (result file container) (optional)""", + ), + 4: 
PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""If the stream is null then we need to get the + file path from the data sources""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="abstract_field_support", + type_names=["abstract_field_support"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mat_support_provider") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="mat_support_provider", server=server) @property def inputs(self): @@ -68,119 +109,120 @@ def inputs(self): Returns -------- - inputs : InputsMaterialSupportProvider + inputs : InputsMaterialSupportProvider """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMaterialSupportProvider + outputs : OutputsMaterialSupportProvider """ return super().outputs -#internal name: mat_support_provider -#scripting name: material_support_provider class InputsMaterialSupportProvider(_Inputs): - """Intermediate class used to connect user inputs to material_support_provider operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.metadata.material_support_provider() - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> 
op.inputs.data_sources.connect(my_data_sources) + """Intermediate class used to connect user inputs to + material_support_provider operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.material_support_provider() + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) """ + def __init__(self, op: Operator): super().__init__(material_support_provider._spec().inputs, op) - self._streams_container = Input(material_support_provider._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + material_support_provider._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(material_support_provider._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + material_support_provider._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: streams (result file container) (optional) + Streams (result file container) (optional) Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.material_support_provider() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: if the stream is null then we need to get the file path from the data sources + If the stream is null then we need to get the + file path from the data sources Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.material_support_provider() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources + class OutputsMaterialSupportProvider(_Outputs): - """Intermediate class used to get outputs from material_support_provider operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.metadata.material_support_provider() - >>> # Connect inputs : op.inputs. ... - >>> result_abstract_field_support = op.outputs.abstract_field_support() + """Intermediate class used to get outputs from + material_support_provider operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.material_support_provider() + >>> # Connect inputs : op.inputs. ... + >>> result_abstract_field_support = op.outputs.abstract_field_support() """ + def __init__(self, op: Operator): super().__init__(material_support_provider._spec().outputs, op) - self._abstract_field_support = Output(material_support_provider._spec().output_pin(0), 0, op) + self._abstract_field_support = Output( + material_support_provider._spec().output_pin(0), 0, op + ) self._outputs.append(self._abstract_field_support) @property def abstract_field_support(self): """Allows to get abstract_field_support output of the operator - Returns ---------- - my_abstract_field_support : AbstractFieldSupport, + my_abstract_field_support : AbstractFieldSupport Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.material_support_provider() >>> # Connect inputs : op.inputs. ... 
- >>> result_abstract_field_support = op.outputs.abstract_field_support() - """ + >>> result_abstract_field_support = op.outputs.abstract_field_support() + """ # noqa: E501 return self._abstract_field_support - diff --git a/ansys/dpf/core/operators/metadata/mesh_selection_manager_provider.py b/ansys/dpf/core/operators/metadata/mesh_selection_manager_provider.py index 4cb3149a28e..9601dc8e55a 100644 --- a/ansys/dpf/core/operators/metadata/mesh_selection_manager_provider.py +++ b/ansys/dpf/core/operators/metadata/mesh_selection_manager_provider.py @@ -1,66 +1,115 @@ """ mesh_selection_manager_provider -=============================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "metadata" category -""" class mesh_selection_manager_provider(Operator): - """Read mesh properties from the results files contained in the streams or data sources and make those properties available through a mesh selection manager in output. 
- - available inputs: - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - available outputs: - - mesh_selection_manager (MeshSelectionManager) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.metadata.mesh_selection_manager_provider() - - >>> # Make input connections - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.metadata.mesh_selection_manager_provider(streams_container=my_streams_container,data_sources=my_data_sources) + """Read mesh properties from the results files contained in the streams + or data sources and make those properties available through a mesh + selection manager in output. + + Parameters + ---------- + streams_container : StreamsContainer, optional + Streams (result file container) (optional) + data_sources : DataSources + If the stream is null then we need to get the + file path from the data sources + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.metadata.mesh_selection_manager_provider() + + >>> # Make input connections + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.metadata.mesh_selection_manager_provider( + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... 
) + + >>> # Get output data + >>> result_mesh_selection_manager = op.outputs.mesh_selection_manager() + """ - >>> # Get output data - >>> result_mesh_selection_manager = op.outputs.mesh_selection_manager()""" - def __init__(self, streams_container=None, data_sources=None, config=None, server=None): - super().__init__(name="MeshSelectionManagerProvider", config = config, server = server) + def __init__( + self, streams_container=None, data_sources=None, config=None, server=None + ): + super().__init__( + name="MeshSelectionManagerProvider", config=config, server=server + ) self._inputs = InputsMeshSelectionManagerProvider(self) self._outputs = OutputsMeshSelectionManagerProvider(self) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) @staticmethod def _spec(): - spec = Specification(description="""Read mesh properties from the results files contained in the streams or data sources and make those properties available through a mesh selection manager in output.""", - map_input_pin_spec={ - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""streams (result file container) (optional)"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""if the stream is null then we need to get the file path from the data sources""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "mesh_selection_manager", type_names=["mesh_selection_manager"], optional=False, document="""""")}) + description = """Read mesh properties from the results files contained in the streams + or data sources and make those properties available + through a mesh selection manager in output.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 3: PinSpecification( + name="streams_container", + 
type_names=["streams_container"], + optional=True, + document="""Streams (result file container) (optional)""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""If the stream is null then we need to get the + file path from the data sources""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="mesh_selection_manager", + type_names=["mesh_selection_manager"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "MeshSelectionManagerProvider") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config( + name="MeshSelectionManagerProvider", server=server + ) @property def inputs(self): @@ -68,119 +117,120 @@ def inputs(self): Returns -------- - inputs : InputsMeshSelectionManagerProvider + inputs : InputsMeshSelectionManagerProvider """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMeshSelectionManagerProvider + outputs : OutputsMeshSelectionManagerProvider """ return super().outputs -#internal name: MeshSelectionManagerProvider -#scripting name: mesh_selection_manager_provider class InputsMeshSelectionManagerProvider(_Inputs): - """Intermediate class used to connect user inputs to mesh_selection_manager_provider operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.metadata.mesh_selection_manager_provider() - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + """Intermediate class used to connect user inputs to + mesh_selection_manager_provider operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.mesh_selection_manager_provider() + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) """ + def __init__(self, op: Operator): super().__init__(mesh_selection_manager_provider._spec().inputs, op) - self._streams_container = Input(mesh_selection_manager_provider._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + mesh_selection_manager_provider._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(mesh_selection_manager_provider._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + mesh_selection_manager_provider._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: streams (result file container) (optional) + Streams (result file container) (optional) Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.mesh_selection_manager_provider() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: if the stream is null then we need to get the file path from the data sources + If the stream is null then we need to get the + file path from the data sources Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.mesh_selection_manager_provider() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources + class OutputsMeshSelectionManagerProvider(_Outputs): - """Intermediate class used to get outputs from mesh_selection_manager_provider operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.metadata.mesh_selection_manager_provider() - >>> # Connect inputs : op.inputs. ... - >>> result_mesh_selection_manager = op.outputs.mesh_selection_manager() + """Intermediate class used to get outputs from + mesh_selection_manager_provider operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.mesh_selection_manager_provider() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_mesh_selection_manager = op.outputs.mesh_selection_manager() """ + def __init__(self, op: Operator): super().__init__(mesh_selection_manager_provider._spec().outputs, op) - self._mesh_selection_manager = Output(mesh_selection_manager_provider._spec().output_pin(0), 0, op) + self._mesh_selection_manager = Output( + mesh_selection_manager_provider._spec().output_pin(0), 0, op + ) self._outputs.append(self._mesh_selection_manager) @property def mesh_selection_manager(self): """Allows to get mesh_selection_manager output of the operator - Returns ---------- - my_mesh_selection_manager : MeshSelectionManager, + my_mesh_selection_manager : MeshSelectionManager Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.mesh_selection_manager_provider() >>> # Connect inputs : op.inputs. ... - >>> result_mesh_selection_manager = op.outputs.mesh_selection_manager() - """ + >>> result_mesh_selection_manager = op.outputs.mesh_selection_manager() + """ # noqa: E501 return self._mesh_selection_manager - diff --git a/ansys/dpf/core/operators/metadata/mesh_support_provider.py b/ansys/dpf/core/operators/metadata/mesh_support_provider.py index 596f4e25264..d44496e9a17 100644 --- a/ansys/dpf/core/operators/metadata/mesh_support_provider.py +++ b/ansys/dpf/core/operators/metadata/mesh_support_provider.py @@ -1,66 +1,107 @@ """ mesh_support_provider -===================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "metadata" category -""" class mesh_support_provider(Operator): """Read the mesh support. 
- available inputs: - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) + Parameters + ---------- + streams_container : StreamsContainer, optional + Streams (result file container) (optional) + data_sources : DataSources + If the stream is null then we need to get the + file path from the data sources + - available outputs: - - abstract_field_support (AbstractFieldSupport) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.metadata.mesh_support_provider() - >>> # Instantiate operator - >>> op = dpf.operators.metadata.mesh_support_provider() + >>> # Make input connections + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) - >>> # Make input connections - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.metadata.mesh_support_provider( + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.metadata.mesh_support_provider(streams_container=my_streams_container,data_sources=my_data_sources) + >>> # Get output data + >>> result_abstract_field_support = op.outputs.abstract_field_support() + """ - >>> # Get output data - >>> result_abstract_field_support = op.outputs.abstract_field_support()""" - def __init__(self, streams_container=None, data_sources=None, config=None, server=None): - super().__init__(name="mesh_support_provider", config = config, server = server) + def __init__( + self, streams_container=None, data_sources=None, config=None, server=None + ): + super().__init__(name="mesh_support_provider", config=config, server=server) self._inputs = InputsMeshSupportProvider(self) self._outputs = OutputsMeshSupportProvider(self) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) @staticmethod def _spec(): - spec = Specification(description="""Read the mesh support.""", - map_input_pin_spec={ - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""streams (result file container) (optional)"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""if the stream is null then we need to get the file path from the data sources""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "abstract_field_support", type_names=["abstract_field_support"], optional=False, document="""""")}) + description = """Read the mesh support.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Streams (result file container) (optional)""", + ), + 4: PinSpecification( + 
name="data_sources", + type_names=["data_sources"], + optional=False, + document="""If the stream is null then we need to get the + file path from the data sources""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="abstract_field_support", + type_names=["abstract_field_support"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mesh_support_provider") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="mesh_support_provider", server=server) @property def inputs(self): @@ -68,119 +109,120 @@ def inputs(self): Returns -------- - inputs : InputsMeshSupportProvider + inputs : InputsMeshSupportProvider """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMeshSupportProvider + outputs : OutputsMeshSupportProvider """ return super().outputs -#internal name: mesh_support_provider -#scripting name: mesh_support_provider class InputsMeshSupportProvider(_Inputs): - """Intermediate class used to connect user inputs to mesh_support_provider operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.metadata.mesh_support_provider() - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + """Intermediate class used to connect user 
inputs to + mesh_support_provider operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.mesh_support_provider() + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) """ + def __init__(self, op: Operator): super().__init__(mesh_support_provider._spec().inputs, op) - self._streams_container = Input(mesh_support_provider._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + mesh_support_provider._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(mesh_support_provider._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + mesh_support_provider._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: streams (result file container) (optional) + Streams (result file container) (optional) Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.mesh_support_provider() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: if the stream is null then we need to get the file path from the data sources + If the stream is null then we need to get the + file path from the data sources Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.mesh_support_provider() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources + class OutputsMeshSupportProvider(_Outputs): - """Intermediate class used to get outputs from mesh_support_provider operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.metadata.mesh_support_provider() - >>> # Connect inputs : op.inputs. ... - >>> result_abstract_field_support = op.outputs.abstract_field_support() + """Intermediate class used to get outputs from + mesh_support_provider operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.mesh_support_provider() + >>> # Connect inputs : op.inputs. ... + >>> result_abstract_field_support = op.outputs.abstract_field_support() """ + def __init__(self, op: Operator): super().__init__(mesh_support_provider._spec().outputs, op) - self._abstract_field_support = Output(mesh_support_provider._spec().output_pin(0), 0, op) + self._abstract_field_support = Output( + mesh_support_provider._spec().output_pin(0), 0, op + ) self._outputs.append(self._abstract_field_support) @property def abstract_field_support(self): """Allows to get abstract_field_support output of the operator - Returns ---------- - my_abstract_field_support : AbstractFieldSupport, + my_abstract_field_support : AbstractFieldSupport Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.mesh_support_provider() >>> # Connect inputs : op.inputs. ... 
- >>> result_abstract_field_support = op.outputs.abstract_field_support() - """ + >>> result_abstract_field_support = op.outputs.abstract_field_support() + """ # noqa: E501 return self._abstract_field_support - diff --git a/ansys/dpf/core/operators/metadata/result_info_provider.py b/ansys/dpf/core/operators/metadata/result_info_provider.py index 44571d340e9..d034f5987c5 100644 --- a/ansys/dpf/core/operators/metadata/result_info_provider.py +++ b/ansys/dpf/core/operators/metadata/result_info_provider.py @@ -1,66 +1,111 @@ """ result_info_provider -==================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "metadata" category -""" class result_info_provider(Operator): - """Read the result info with information sucha as available results or unit system from the results files contained in the streams or data sources. 
- - available inputs: - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - available outputs: - - result_info (ResultInfo) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.metadata.result_info_provider() - - >>> # Make input connections - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.metadata.result_info_provider(streams_container=my_streams_container,data_sources=my_data_sources) + """Read the result info with information sucha as available results or + unit system from the results files contained in the streams or + data sources. + + Parameters + ---------- + streams_container : StreamsContainer, optional + Streams (result file container) (optional) + data_sources : DataSources + If the stream is null then we need to get the + file path from the data sources + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.metadata.result_info_provider() + + >>> # Make input connections + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.metadata.result_info_provider( + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... 
) + + >>> # Get output data + >>> result_result_info = op.outputs.result_info() + """ - >>> # Get output data - >>> result_result_info = op.outputs.result_info()""" - def __init__(self, streams_container=None, data_sources=None, config=None, server=None): - super().__init__(name="ResultInfoProvider", config = config, server = server) + def __init__( + self, streams_container=None, data_sources=None, config=None, server=None + ): + super().__init__(name="ResultInfoProvider", config=config, server=server) self._inputs = InputsResultInfoProvider(self) self._outputs = OutputsResultInfoProvider(self) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) @staticmethod def _spec(): - spec = Specification(description="""Read the result info with information sucha as available results or unit system from the results files contained in the streams or data sources.""", - map_input_pin_spec={ - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""streams (result file container) (optional)"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""if the stream is null then we need to get the file path from the data sources""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "result_info", type_names=["result_info"], optional=False, document="""""")}) + description = """Read the result info with information sucha as available results or + unit system from the results files contained in the + streams or data sources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Streams (result file container) (optional)""", + ), + 4: PinSpecification( + 
name="data_sources", + type_names=["data_sources"], + optional=False, + document="""If the stream is null then we need to get the + file path from the data sources""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="result_info", + type_names=["result_info"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ResultInfoProvider") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ResultInfoProvider", server=server) @property def inputs(self): @@ -68,119 +113,116 @@ def inputs(self): Returns -------- - inputs : InputsResultInfoProvider + inputs : InputsResultInfoProvider """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsResultInfoProvider + outputs : OutputsResultInfoProvider """ return super().outputs -#internal name: ResultInfoProvider -#scripting name: result_info_provider class InputsResultInfoProvider(_Inputs): - """Intermediate class used to connect user inputs to result_info_provider operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.metadata.result_info_provider() - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + """Intermediate class used to connect user inputs to + result_info_provider 
operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.result_info_provider() + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) """ + def __init__(self, op: Operator): super().__init__(result_info_provider._spec().inputs, op) - self._streams_container = Input(result_info_provider._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + result_info_provider._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(result_info_provider._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(result_info_provider._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: streams (result file container) (optional) + Streams (result file container) (optional) Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.result_info_provider() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: if the stream is null then we need to get the file path from the data sources + If the stream is null then we need to get the + file path from the data sources Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.result_info_provider() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources + class OutputsResultInfoProvider(_Outputs): - """Intermediate class used to get outputs from result_info_provider operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.metadata.result_info_provider() - >>> # Connect inputs : op.inputs. ... - >>> result_result_info = op.outputs.result_info() + """Intermediate class used to get outputs from + result_info_provider operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.result_info_provider() + >>> # Connect inputs : op.inputs. ... + >>> result_result_info = op.outputs.result_info() """ + def __init__(self, op: Operator): super().__init__(result_info_provider._spec().outputs, op) - self._result_info = Output(result_info_provider._spec().output_pin(0), 0, op) + self._result_info = Output(result_info_provider._spec().output_pin(0), 0, op) self._outputs.append(self._result_info) @property def result_info(self): """Allows to get result_info output of the operator - Returns ---------- - my_result_info : ResultInfo, + my_result_info : ResultInfo Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.result_info_provider() >>> # Connect inputs : op.inputs. ... 
- >>> result_result_info = op.outputs.result_info() - """ + >>> result_result_info = op.outputs.result_info() + """ # noqa: E501 return self._result_info - diff --git a/ansys/dpf/core/operators/metadata/streams_provider.py b/ansys/dpf/core/operators/metadata/streams_provider.py index eefaf4bb5e0..2c4a13086c6 100644 --- a/ansys/dpf/core/operators/metadata/streams_provider.py +++ b/ansys/dpf/core/operators/metadata/streams_provider.py @@ -1,60 +1,89 @@ """ streams_provider -================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "metadata" category -""" class streams_provider(Operator): """Creates streams (files with cache) from the data sources. - available inputs: - - data_sources (DataSources) + Parameters + ---------- + data_sources : DataSources + - available outputs: - - streams_container (StreamsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.metadata.streams_provider() - >>> # Instantiate operator - >>> op = dpf.operators.metadata.streams_provider() + >>> # Make input connections + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) - >>> # Make input connections - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.metadata.streams_provider( + ... data_sources=my_data_sources, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.metadata.streams_provider(data_sources=my_data_sources) + >>> # Get output data + >>> result_streams_container = op.outputs.streams_container() + """ - >>> # Get output data - >>> result_streams_container = op.outputs.streams_container()""" def __init__(self, data_sources=None, config=None, server=None): - super().__init__(name="stream_provider", config = config, server = server) + super().__init__(name="stream_provider", config=config, server=server) self._inputs = InputsStreamsProvider(self) self._outputs = OutputsStreamsProvider(self) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) @staticmethod def _spec(): - spec = Specification(description="""Creates streams (files with cache) from the data sources.""", - map_input_pin_spec={ - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=False, document="""""")}) + description = """Creates streams (files with cache) from the data sources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "stream_provider") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="stream_provider", server=server) @property def inputs(self): @@ -62,91 +91,87 @@ def inputs(self): Returns -------- - inputs : InputsStreamsProvider + inputs : InputsStreamsProvider """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsStreamsProvider + outputs : OutputsStreamsProvider """ return super().outputs -#internal name: stream_provider -#scripting name: streams_provider class InputsStreamsProvider(_Inputs): - """Intermediate class used to connect user inputs to streams_provider operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.metadata.streams_provider() - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + """Intermediate class used to connect user inputs to + streams_provider operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.streams_provider() + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) """ + def __init__(self, op: Operator): super().__init__(streams_provider._spec().inputs, op) - self._data_sources = Input(streams_provider._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(streams_provider._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.streams_provider() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources + class OutputsStreamsProvider(_Outputs): - """Intermediate class used to get outputs from streams_provider operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.metadata.streams_provider() - >>> # Connect inputs : op.inputs. ... - >>> result_streams_container = op.outputs.streams_container() + """Intermediate class used to get outputs from + streams_provider operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.streams_provider() + >>> # Connect inputs : op.inputs. ... + >>> result_streams_container = op.outputs.streams_container() """ + def __init__(self, op: Operator): super().__init__(streams_provider._spec().outputs, op) - self._streams_container = Output(streams_provider._spec().output_pin(0), 0, op) + self._streams_container = Output(streams_provider._spec().output_pin(0), 0, op) self._outputs.append(self._streams_container) @property def streams_container(self): """Allows to get streams_container output of the operator - Returns ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.streams_provider() >>> # Connect inputs : op.inputs. ... 
- >>> result_streams_container = op.outputs.streams_container() - """ + >>> result_streams_container = op.outputs.streams_container() + """ # noqa: E501 return self._streams_container - diff --git a/ansys/dpf/core/operators/metadata/time_freq_provider.py b/ansys/dpf/core/operators/metadata/time_freq_provider.py index ae64d7b6756..1943006d8e8 100644 --- a/ansys/dpf/core/operators/metadata/time_freq_provider.py +++ b/ansys/dpf/core/operators/metadata/time_freq_provider.py @@ -1,66 +1,109 @@ """ time_freq_provider -================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "metadata" category -""" class time_freq_provider(Operator): - """Read the time freq support from the results files contained in the streams or data sources. - - available inputs: - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - available outputs: - - time_freq_support (TimeFreqSupport) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.metadata.time_freq_provider() - - >>> # Make input connections - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.metadata.time_freq_provider(streams_container=my_streams_container,data_sources=my_data_sources) + """Read the time freq support from the results files contained in the + streams or data sources. 
+ + Parameters + ---------- + streams_container : StreamsContainer, optional + Streams (result file container) (optional) + data_sources : DataSources + If the stream is null then we need to get the + file path from the data sources + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.metadata.time_freq_provider() + + >>> # Make input connections + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.metadata.time_freq_provider( + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... ) + + >>> # Get output data + >>> result_time_freq_support = op.outputs.time_freq_support() + """ - >>> # Get output data - >>> result_time_freq_support = op.outputs.time_freq_support()""" - def __init__(self, streams_container=None, data_sources=None, config=None, server=None): - super().__init__(name="TimeFreqSupportProvider", config = config, server = server) + def __init__( + self, streams_container=None, data_sources=None, config=None, server=None + ): + super().__init__(name="TimeFreqSupportProvider", config=config, server=server) self._inputs = InputsTimeFreqProvider(self) self._outputs = OutputsTimeFreqProvider(self) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) @staticmethod def _spec(): - spec = Specification(description="""Read the time freq support from the results files contained in the streams or data sources.""", - map_input_pin_spec={ - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""streams 
(result file container) (optional)"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""if the stream is null then we need to get the file path from the data sources""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "time_freq_support", type_names=["time_freq_support"], optional=False, document="""""")}) + description = """Read the time freq support from the results files contained in the + streams or data sources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Streams (result file container) (optional)""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""If the stream is null then we need to get the + file path from the data sources""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="time_freq_support", + type_names=["time_freq_support"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "TimeFreqSupportProvider") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="TimeFreqSupportProvider", server=server) @property def inputs(self): @@ -68,119 +111,118 @@ def inputs(self): Returns -------- - inputs : InputsTimeFreqProvider + inputs : InputsTimeFreqProvider """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsTimeFreqProvider + outputs : OutputsTimeFreqProvider """ return super().outputs -#internal name: TimeFreqSupportProvider -#scripting name: time_freq_provider class InputsTimeFreqProvider(_Inputs): - """Intermediate class used to connect user inputs to time_freq_provider operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.metadata.time_freq_provider() - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + """Intermediate class used to connect user inputs to + time_freq_provider operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.time_freq_provider() + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) """ + def __init__(self, op: Operator): super().__init__(time_freq_provider._spec().inputs, op) - self._streams_container = Input(time_freq_provider._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + time_freq_provider._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(time_freq_provider._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(time_freq_provider._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: streams (result file container) (optional) + Streams (result file container) (optional) Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.time_freq_provider() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: if the stream is null then we need to get the file path from the data sources + If the stream is null then we need to get the + file path from the data sources Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.time_freq_provider() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources + class OutputsTimeFreqProvider(_Outputs): - """Intermediate class used to get outputs from time_freq_provider operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.metadata.time_freq_provider() - >>> # Connect inputs : op.inputs. ... - >>> result_time_freq_support = op.outputs.time_freq_support() + """Intermediate class used to get outputs from + time_freq_provider operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.time_freq_provider() + >>> # Connect inputs : op.inputs. ... + >>> result_time_freq_support = op.outputs.time_freq_support() """ + def __init__(self, op: Operator): super().__init__(time_freq_provider._spec().outputs, op) - self._time_freq_support = Output(time_freq_provider._spec().output_pin(0), 0, op) + self._time_freq_support = Output( + time_freq_provider._spec().output_pin(0), 0, op + ) self._outputs.append(self._time_freq_support) @property def time_freq_support(self): """Allows to get time_freq_support output of the operator - Returns ---------- - my_time_freq_support : TimeFreqSupport, + my_time_freq_support : TimeFreqSupport Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.metadata.time_freq_provider() >>> # Connect inputs : op.inputs. ... 
- >>> result_time_freq_support = op.outputs.time_freq_support() - """ + >>> result_time_freq_support = op.outputs.time_freq_support() + """ # noqa: E501 return self._time_freq_support - diff --git a/ansys/dpf/core/operators/min_max/max_by_component.py b/ansys/dpf/core/operators/min_max/max_by_component.py index 49df8d8057d..02be083eb79 100644 --- a/ansys/dpf/core/operators/min_max/max_by_component.py +++ b/ansys/dpf/core/operators/min_max/max_by_component.py @@ -1,78 +1,161 @@ """ max_by_component -================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "min_max" category -""" class max_by_component(Operator): """Give the maximum for each element rank by comparing several fields. 
- available inputs: - - use_absolute_value (bool) - - fieldA1 (Field, FieldsContainer) - - fieldA2 (Field, FieldsContainer) - - fieldB2 (Field, FieldsContainer) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.min_max.max_by_component() - - >>> # Make input connections - >>> my_use_absolute_value = bool() - >>> op.inputs.use_absolute_value.connect(my_use_absolute_value) - >>> my_fieldA1 = dpf.Field() - >>> op.inputs.fieldA1.connect(my_fieldA1) - >>> my_fieldA2 = dpf.Field() - >>> op.inputs.fieldA2.connect(my_fieldA2) - >>> my_fieldB2 = dpf.Field() - >>> op.inputs.fieldB2.connect(my_fieldB2) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.min_max.max_by_component(use_absolute_value=my_use_absolute_value,fieldA1=my_fieldA1,fieldA2=my_fieldA2,fieldB2=my_fieldB2) - - >>> # Get output data - >>> result_field = op.outputs.field()""" - def __init__(self, use_absolute_value=None, fieldA1=None, fieldA2=None, fieldB2=None, config=None, server=None): - super().__init__(name="max_by_component", config = config, server = server) + Parameters + ---------- + use_absolute_value : bool + Use_absolute_value + fieldA1 : Field or FieldsContainer + Field or fields container with only one field + is expected + fieldA2 : Field or FieldsContainer + Field or fields container with only one field + is expected + fieldB1 : Field or FieldsContainer + Field or fields container with only one field + is expected + fieldB2 : Field or FieldsContainer + Field or fields container with only one field + is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.min_max.max_by_component() + + >>> # Make input connections + >>> my_use_absolute_value = bool() + >>> op.inputs.use_absolute_value.connect(my_use_absolute_value) + >>> my_fieldA1 = dpf.Field() + >>> 
op.inputs.fieldA1.connect(my_fieldA1) + >>> my_fieldA2 = dpf.Field() + >>> op.inputs.fieldA2.connect(my_fieldA2) + >>> my_fieldB1 = dpf.Field() + >>> op.inputs.fieldB1.connect(my_fieldB1) + >>> my_fieldB2 = dpf.Field() + >>> op.inputs.fieldB2.connect(my_fieldB2) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.min_max.max_by_component( + ... use_absolute_value=my_use_absolute_value, + ... fieldA1=my_fieldA1, + ... fieldA2=my_fieldA2, + ... fieldB1=my_fieldB1, + ... fieldB2=my_fieldB2, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__( + self, + use_absolute_value=None, + fieldA1=None, + fieldA2=None, + fieldB1=None, + fieldB2=None, + config=None, + server=None, + ): + super().__init__(name="max_by_component", config=config, server=server) self._inputs = InputsMaxByComponent(self) self._outputs = OutputsMaxByComponent(self) - if use_absolute_value !=None: + if use_absolute_value is not None: self.inputs.use_absolute_value.connect(use_absolute_value) - if fieldA1 !=None: + if fieldA1 is not None: self.inputs.fieldA1.connect(fieldA1) - if fieldA2 !=None: + if fieldA2 is not None: self.inputs.fieldA2.connect(fieldA2) - if fieldB2 !=None: + if fieldB1 is not None: + self.inputs.fieldB1.connect(fieldB1) + if fieldB2 is not None: self.inputs.fieldB2.connect(fieldB2) @staticmethod def _spec(): - spec = Specification(description="""Give the maximum for each element rank by comparing several fields.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "use_absolute_value", type_names=["bool"], optional=False, document="""use_absolute_value"""), - 1 : PinSpecification(name = "fieldA", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 2 : PinSpecification(name = "fieldA", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 3 : 
PinSpecification(name = "fieldB", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = ( + """Give the maximum for each element rank by comparing several fields.""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="use_absolute_value", + type_names=["bool"], + optional=False, + document="""Use_absolute_value""", + ), + 1: PinSpecification( + name="fieldA", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 2: PinSpecification( + name="fieldA", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 2: PinSpecification( + name="fieldB", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 3: PinSpecification( + name="fieldB", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "max_by_component") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. 
When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="max_by_component", server=server) @property def inputs(self): @@ -80,171 +163,191 @@ def inputs(self): Returns -------- - inputs : InputsMaxByComponent + inputs : InputsMaxByComponent """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMaxByComponent + outputs : OutputsMaxByComponent """ return super().outputs -#internal name: max_by_component -#scripting name: max_by_component class InputsMaxByComponent(_Inputs): - """Intermediate class used to connect user inputs to max_by_component operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.max_by_component() - >>> my_use_absolute_value = bool() - >>> op.inputs.use_absolute_value.connect(my_use_absolute_value) - >>> my_fieldA1 = dpf.Field() - >>> op.inputs.fieldA1.connect(my_fieldA1) - >>> my_fieldA2 = dpf.Field() - >>> op.inputs.fieldA2.connect(my_fieldA2) - >>> my_fieldB2 = dpf.Field() - >>> op.inputs.fieldB2.connect(my_fieldB2) + """Intermediate class used to connect user inputs to + max_by_component operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.max_by_component() + >>> my_use_absolute_value = bool() + >>> op.inputs.use_absolute_value.connect(my_use_absolute_value) + >>> my_fieldA1 = dpf.Field() + >>> op.inputs.fieldA1.connect(my_fieldA1) + >>> my_fieldA2 = dpf.Field() + >>> op.inputs.fieldA2.connect(my_fieldA2) + >>> my_fieldB1 = dpf.Field() + >>> op.inputs.fieldB1.connect(my_fieldB1) + >>> my_fieldB2 = dpf.Field() + >>> op.inputs.fieldB2.connect(my_fieldB2) """ + def __init__(self, op: Operator): super().__init__(max_by_component._spec().inputs, op) - self._use_absolute_value = Input(max_by_component._spec().input_pin(0), 0, op, -1) + self._use_absolute_value = Input( + max_by_component._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._use_absolute_value) - self._fieldA1 = Input(max_by_component._spec().input_pin(1), 1, op, 0) + self._fieldA1 = Input(max_by_component._spec().input_pin(1), 1, op, 0) self._inputs.append(self._fieldA1) - self._fieldA2 = Input(max_by_component._spec().input_pin(2), 2, op, 1) + self._fieldA2 = Input(max_by_component._spec().input_pin(2), 2, op, 1) self._inputs.append(self._fieldA2) - self._fieldB2 = Input(max_by_component._spec().input_pin(3), 3, op, 1) + self._fieldB1 = Input(max_by_component._spec().input_pin(2), 2, op, 0) + self._inputs.append(self._fieldB1) + self._fieldB2 = Input(max_by_component._spec().input_pin(3), 3, op, 1) self._inputs.append(self._fieldB2) @property def use_absolute_value(self): - """Allows to connect use_absolute_value input to the operator + """Allows to connect use_absolute_value input to the operator. 
- - pindoc: use_absolute_value + Use_absolute_value Parameters ---------- - my_use_absolute_value : bool, + my_use_absolute_value : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.max_by_component() >>> op.inputs.use_absolute_value.connect(my_use_absolute_value) - >>> #or + >>> # or >>> op.inputs.use_absolute_value(my_use_absolute_value) - """ return self._use_absolute_value @property def fieldA1(self): - """Allows to connect fieldA1 input to the operator + """Allows to connect fieldA1 input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldA1 : Field, FieldsContainer, + my_fieldA1 : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.max_by_component() >>> op.inputs.fieldA1.connect(my_fieldA1) - >>> #or + >>> # or >>> op.inputs.fieldA1(my_fieldA1) - """ return self._fieldA1 @property def fieldA2(self): - """Allows to connect fieldA2 input to the operator + """Allows to connect fieldA2 input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldA2 : Field, FieldsContainer, + my_fieldA2 : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.max_by_component() >>> op.inputs.fieldA2.connect(my_fieldA2) - >>> #or + >>> # or >>> op.inputs.fieldA2(my_fieldA2) - """ return self._fieldA2 @property - def fieldB2(self): - """Allows to connect fieldB2 input to the operator + def fieldB1(self): + """Allows to connect fieldB1 input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldB2 : Field, FieldsContainer, + my_fieldB1 : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.max_by_component() + >>> op.inputs.fieldB1.connect(my_fieldB1) + >>> # or + >>> op.inputs.fieldB1(my_fieldB1) + """ + return self._fieldB1 + + @property + def fieldB2(self): + """Allows to connect fieldB2 input to the operator. + + Field or fields container with only one field + is expected + Parameters + ---------- + my_fieldB2 : Field or FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf >>> op = dpf.operators.min_max.max_by_component() >>> op.inputs.fieldB2.connect(my_fieldB2) - >>> #or + >>> # or >>> op.inputs.fieldB2(my_fieldB2) - """ return self._fieldB2 + class OutputsMaxByComponent(_Outputs): - """Intermediate class used to get outputs from max_by_component operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.max_by_component() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + max_by_component operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.max_by_component() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(max_by_component._spec().outputs, op) - self._field = Output(max_by_component._spec().output_pin(0), 0, op) + self._field = Output(max_by_component._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.max_by_component() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/min_max/max_over_phase.py b/ansys/dpf/core/operators/min_max/max_over_phase.py index ba78d090b0d..d3dd0b0777d 100644 --- a/ansys/dpf/core/operators/min_max/max_over_phase.py +++ b/ansys/dpf/core/operators/min_max/max_over_phase.py @@ -1,78 +1,139 @@ """ max_over_phase -============== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "min_max" category -""" class max_over_phase(Operator): - """Returns, for each entity, the maximum value of (real value * cos(theta) - imaginary value * sin(theta)) for theta in [0, 360]degrees with the increment in input. 
- - available inputs: - - real_field (Field) - - imaginary_field (Field) - - abs_value (bool) (optional) - - phase_increment (float) (optional) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.min_max.max_over_phase() - - >>> # Make input connections - >>> my_real_field = dpf.Field() - >>> op.inputs.real_field.connect(my_real_field) - >>> my_imaginary_field = dpf.Field() - >>> op.inputs.imaginary_field.connect(my_imaginary_field) - >>> my_abs_value = bool() - >>> op.inputs.abs_value.connect(my_abs_value) - >>> my_phase_increment = float() - >>> op.inputs.phase_increment.connect(my_phase_increment) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.min_max.max_over_phase(real_field=my_real_field,imaginary_field=my_imaginary_field,abs_value=my_abs_value,phase_increment=my_phase_increment) - - >>> # Get output data - >>> result_field = op.outputs.field()""" - def __init__(self, real_field=None, imaginary_field=None, abs_value=None, phase_increment=None, config=None, server=None): - super().__init__(name="max_over_phase", config = config, server = server) + """Returns, for each entity, the maximum value of (real value * + cos(theta) - imaginary value * sin(theta)) for theta in [0, + 360]degrees with the increment in input. + + Parameters + ---------- + real_field : Field + imaginary_field : Field + abs_value : bool, optional + Should use absolute value. + phase_increment : float, optional + Phase increment (default is 10.0 degrees). 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.min_max.max_over_phase() + + >>> # Make input connections + >>> my_real_field = dpf.Field() + >>> op.inputs.real_field.connect(my_real_field) + >>> my_imaginary_field = dpf.Field() + >>> op.inputs.imaginary_field.connect(my_imaginary_field) + >>> my_abs_value = bool() + >>> op.inputs.abs_value.connect(my_abs_value) + >>> my_phase_increment = float() + >>> op.inputs.phase_increment.connect(my_phase_increment) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.min_max.max_over_phase( + ... real_field=my_real_field, + ... imaginary_field=my_imaginary_field, + ... abs_value=my_abs_value, + ... phase_increment=my_phase_increment, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__( + self, + real_field=None, + imaginary_field=None, + abs_value=None, + phase_increment=None, + config=None, + server=None, + ): + super().__init__(name="max_over_phase", config=config, server=server) self._inputs = InputsMaxOverPhase(self) self._outputs = OutputsMaxOverPhase(self) - if real_field !=None: + if real_field is not None: self.inputs.real_field.connect(real_field) - if imaginary_field !=None: + if imaginary_field is not None: self.inputs.imaginary_field.connect(imaginary_field) - if abs_value !=None: + if abs_value is not None: self.inputs.abs_value.connect(abs_value) - if phase_increment !=None: + if phase_increment is not None: self.inputs.phase_increment.connect(phase_increment) @staticmethod def _spec(): - spec = Specification(description="""Returns, for each entity, the maximum value of (real value * cos(theta) - imaginary value * sin(theta)) for theta in [0, 360]degrees with the increment in input.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "real_field", type_names=["field"], optional=False, document=""""""), - 1 : PinSpecification(name = "imaginary_field", 
type_names=["field"], optional=False, document=""""""), - 2 : PinSpecification(name = "abs_value", type_names=["bool"], optional=True, document="""Should use absolute value."""), - 3 : PinSpecification(name = "phase_increment", type_names=["double"], optional=True, document="""Phase increment (default is 10.0 degrees).""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Returns, for each entity, the maximum value of (real value * + cos(theta) - imaginary value * sin(theta)) for theta in + [0, 360]degrees with the increment in input.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="real_field", + type_names=["field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="imaginary_field", + type_names=["field"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="abs_value", + type_names=["bool"], + optional=True, + document="""Should use absolute value.""", + ), + 3: PinSpecification( + name="phase_increment", + type_names=["double"], + optional=True, + document="""Phase increment (default is 10.0 degrees).""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "max_over_phase") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="max_over_phase", server=server) @property def inputs(self): @@ -80,167 +141,157 @@ def inputs(self): Returns -------- - inputs : InputsMaxOverPhase + inputs : InputsMaxOverPhase """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMaxOverPhase + outputs : OutputsMaxOverPhase """ return super().outputs -#internal name: max_over_phase -#scripting name: max_over_phase class InputsMaxOverPhase(_Inputs): - """Intermediate class used to connect user inputs to max_over_phase operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.max_over_phase() - >>> my_real_field = dpf.Field() - >>> op.inputs.real_field.connect(my_real_field) - >>> my_imaginary_field = dpf.Field() - >>> op.inputs.imaginary_field.connect(my_imaginary_field) - >>> my_abs_value = bool() - >>> op.inputs.abs_value.connect(my_abs_value) - >>> my_phase_increment = float() - >>> op.inputs.phase_increment.connect(my_phase_increment) + """Intermediate class used to connect user inputs to + max_over_phase operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.max_over_phase() + >>> my_real_field = dpf.Field() + >>> op.inputs.real_field.connect(my_real_field) + >>> my_imaginary_field = dpf.Field() + >>> op.inputs.imaginary_field.connect(my_imaginary_field) + >>> my_abs_value = bool() + >>> op.inputs.abs_value.connect(my_abs_value) + >>> my_phase_increment = float() + >>> op.inputs.phase_increment.connect(my_phase_increment) """ + def __init__(self, op: Operator): super().__init__(max_over_phase._spec().inputs, op) - self._real_field = Input(max_over_phase._spec().input_pin(0), 0, op, -1) + self._real_field = Input(max_over_phase._spec().input_pin(0), 0, op, -1) self._inputs.append(self._real_field) - self._imaginary_field = Input(max_over_phase._spec().input_pin(1), 1, op, -1) + self._imaginary_field = Input(max_over_phase._spec().input_pin(1), 1, op, -1) self._inputs.append(self._imaginary_field) - self._abs_value = Input(max_over_phase._spec().input_pin(2), 2, op, -1) + self._abs_value = Input(max_over_phase._spec().input_pin(2), 2, op, -1) self._inputs.append(self._abs_value) - self._phase_increment = Input(max_over_phase._spec().input_pin(3), 3, op, -1) + self._phase_increment = Input(max_over_phase._spec().input_pin(3), 3, op, -1) self._inputs.append(self._phase_increment) @property def real_field(self): - """Allows to connect real_field input to the operator + """Allows to connect real_field input to the operator. Parameters ---------- - my_real_field : Field, + my_real_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.max_over_phase() >>> op.inputs.real_field.connect(my_real_field) - >>> #or + >>> # or >>> op.inputs.real_field(my_real_field) - """ return self._real_field @property def imaginary_field(self): - """Allows to connect imaginary_field input to the operator + """Allows to connect imaginary_field input to the operator. 
Parameters ---------- - my_imaginary_field : Field, + my_imaginary_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.max_over_phase() >>> op.inputs.imaginary_field.connect(my_imaginary_field) - >>> #or + >>> # or >>> op.inputs.imaginary_field(my_imaginary_field) - """ return self._imaginary_field @property def abs_value(self): - """Allows to connect abs_value input to the operator + """Allows to connect abs_value input to the operator. - - pindoc: Should use absolute value. + Should use absolute value. Parameters ---------- - my_abs_value : bool, + my_abs_value : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.max_over_phase() >>> op.inputs.abs_value.connect(my_abs_value) - >>> #or + >>> # or >>> op.inputs.abs_value(my_abs_value) - """ return self._abs_value @property def phase_increment(self): - """Allows to connect phase_increment input to the operator + """Allows to connect phase_increment input to the operator. - - pindoc: Phase increment (default is 10.0 degrees). + Phase increment (default is 10.0 degrees). Parameters ---------- - my_phase_increment : float, + my_phase_increment : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.max_over_phase() >>> op.inputs.phase_increment.connect(my_phase_increment) - >>> #or + >>> # or >>> op.inputs.phase_increment(my_phase_increment) - """ return self._phase_increment + class OutputsMaxOverPhase(_Outputs): - """Intermediate class used to get outputs from max_over_phase operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.max_over_phase() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + max_over_phase operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.max_over_phase() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(max_over_phase._spec().outputs, op) - self._field = Output(max_over_phase._spec().output_pin(0), 0, op) + self._field = Output(max_over_phase._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.max_over_phase() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/min_max/max_over_time_by_entity.py b/ansys/dpf/core/operators/min_max/max_over_time_by_entity.py index 27f078e1414..861a469e162 100644 --- a/ansys/dpf/core/operators/min_max/max_over_time_by_entity.py +++ b/ansys/dpf/core/operators/min_max/max_over_time_by_entity.py @@ -1,72 +1,122 @@ """ max_over_time_by_entity -======================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "min_max" category -""" class max_over_time_by_entity(Operator): """Evaluates maximum over time/frequency. 
- available inputs: - - fields_container (FieldsContainer) - - abs_value (bool) (optional) - - compute_amplitude (bool) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.min_max.max_over_time_by_entity() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_abs_value = bool() - >>> op.inputs.abs_value.connect(my_abs_value) - >>> my_compute_amplitude = bool() - >>> op.inputs.compute_amplitude.connect(my_compute_amplitude) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.min_max.max_over_time_by_entity(fields_container=my_fields_container,abs_value=my_abs_value,compute_amplitude=my_compute_amplitude) + Parameters + ---------- + fields_container : FieldsContainer + abs_value : bool, optional + Should use absolute value. + compute_amplitude : bool, optional + Do calculate amplitude. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.min_max.max_over_time_by_entity() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_abs_value = bool() + >>> op.inputs.abs_value.connect(my_abs_value) + >>> my_compute_amplitude = bool() + >>> op.inputs.compute_amplitude.connect(my_compute_amplitude) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.min_max.max_over_time_by_entity( + ... fields_container=my_fields_container, + ... abs_value=my_abs_value, + ... compute_amplitude=my_compute_amplitude, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, abs_value=None, compute_amplitude=None, config=None, server=None): - super().__init__(name="max_over_time_by_entity", config = config, server = server) + def __init__( + self, + fields_container=None, + abs_value=None, + compute_amplitude=None, + config=None, + server=None, + ): + super().__init__(name="max_over_time_by_entity", config=config, server=server) self._inputs = InputsMaxOverTimeByEntity(self) self._outputs = OutputsMaxOverTimeByEntity(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if abs_value !=None: + if abs_value is not None: self.inputs.abs_value.connect(abs_value) - if compute_amplitude !=None: + if compute_amplitude is not None: self.inputs.compute_amplitude.connect(compute_amplitude) @staticmethod def _spec(): - spec = Specification(description="""Evaluates maximum over time/frequency.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 3 : PinSpecification(name = "abs_value", type_names=["bool"], optional=True, document="""Should use absolute value."""), - 4 : PinSpecification(name = "compute_amplitude", type_names=["bool"], optional=True, document="""Do calculate amplitude.""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Evaluates maximum over time/frequency.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 3: PinSpecification( + name="abs_value", + type_names=["bool"], + 
optional=True, + document="""Should use absolute value.""", + ), + 4: PinSpecification( + name="compute_amplitude", + type_names=["bool"], + optional=True, + document="""Do calculate amplitude.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "max_over_time_by_entity") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="max_over_time_by_entity", server=server) @property def inputs(self): @@ -74,143 +124,141 @@ def inputs(self): Returns -------- - inputs : InputsMaxOverTimeByEntity + inputs : InputsMaxOverTimeByEntity """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMaxOverTimeByEntity + outputs : OutputsMaxOverTimeByEntity """ return super().outputs -#internal name: max_over_time_by_entity -#scripting name: max_over_time_by_entity class InputsMaxOverTimeByEntity(_Inputs): - """Intermediate class used to connect user inputs to max_over_time_by_entity operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.max_over_time_by_entity() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_abs_value = bool() - >>> op.inputs.abs_value.connect(my_abs_value) - >>> my_compute_amplitude = bool() - 
>>> op.inputs.compute_amplitude.connect(my_compute_amplitude) + """Intermediate class used to connect user inputs to + max_over_time_by_entity operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.max_over_time_by_entity() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_abs_value = bool() + >>> op.inputs.abs_value.connect(my_abs_value) + >>> my_compute_amplitude = bool() + >>> op.inputs.compute_amplitude.connect(my_compute_amplitude) """ + def __init__(self, op: Operator): super().__init__(max_over_time_by_entity._spec().inputs, op) - self._fields_container = Input(max_over_time_by_entity._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + max_over_time_by_entity._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._abs_value = Input(max_over_time_by_entity._spec().input_pin(3), 3, op, -1) + self._abs_value = Input(max_over_time_by_entity._spec().input_pin(3), 3, op, -1) self._inputs.append(self._abs_value) - self._compute_amplitude = Input(max_over_time_by_entity._spec().input_pin(4), 4, op, -1) + self._compute_amplitude = Input( + max_over_time_by_entity._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._compute_amplitude) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.max_over_time_by_entity() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def abs_value(self): - """Allows to connect abs_value input to the operator + """Allows to connect abs_value input to the operator. - - pindoc: Should use absolute value. + Should use absolute value. Parameters ---------- - my_abs_value : bool, + my_abs_value : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.max_over_time_by_entity() >>> op.inputs.abs_value.connect(my_abs_value) - >>> #or + >>> # or >>> op.inputs.abs_value(my_abs_value) - """ return self._abs_value @property def compute_amplitude(self): - """Allows to connect compute_amplitude input to the operator + """Allows to connect compute_amplitude input to the operator. - - pindoc: Do calculate amplitude. + Do calculate amplitude. Parameters ---------- - my_compute_amplitude : bool, + my_compute_amplitude : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.max_over_time_by_entity() >>> op.inputs.compute_amplitude.connect(my_compute_amplitude) - >>> #or + >>> # or >>> op.inputs.compute_amplitude(my_compute_amplitude) - """ return self._compute_amplitude + class OutputsMaxOverTimeByEntity(_Outputs): - """Intermediate class used to get outputs from max_over_time_by_entity operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.max_over_time_by_entity() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + max_over_time_by_entity operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.max_over_time_by_entity() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(max_over_time_by_entity._spec().outputs, op) - self._fields_container = Output(max_over_time_by_entity._spec().output_pin(0), 0, op) + self._fields_container = Output( + max_over_time_by_entity._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.max_over_time_by_entity() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/min_max/min_by_component.py b/ansys/dpf/core/operators/min_max/min_by_component.py index 7df46521acc..deba13477cc 100644 --- a/ansys/dpf/core/operators/min_max/min_by_component.py +++ b/ansys/dpf/core/operators/min_max/min_by_component.py @@ -1,78 +1,161 @@ """ min_by_component -================ +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "min_max" category -""" class min_by_component(Operator): - """Give the maximum for each element rank by comparing several fields. - - available inputs: - - use_absolute_value (bool) - - fieldA1 (Field, FieldsContainer) - - fieldA2 (Field, FieldsContainer) - - fieldB2 (Field, FieldsContainer) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.min_max.min_by_component() - - >>> # Make input connections - >>> my_use_absolute_value = bool() - >>> op.inputs.use_absolute_value.connect(my_use_absolute_value) - >>> my_fieldA1 = dpf.Field() - >>> op.inputs.fieldA1.connect(my_fieldA1) - >>> my_fieldA2 = dpf.Field() - >>> op.inputs.fieldA2.connect(my_fieldA2) - >>> my_fieldB2 = dpf.Field() - >>> op.inputs.fieldB2.connect(my_fieldB2) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.min_max.min_by_component(use_absolute_value=my_use_absolute_value,fieldA1=my_fieldA1,fieldA2=my_fieldA2,fieldB2=my_fieldB2) - - >>> # Get output data - >>> result_field = op.outputs.field()""" - def __init__(self, use_absolute_value=None, fieldA1=None, fieldA2=None, fieldB2=None, config=None, server=None): - super().__init__(name="min_by_component", config = config, server = server) + """Give the minimum for each element rank by comparing several fields. 
+ + Parameters + ---------- + use_absolute_value : bool + Use_absolute_value + fieldA1 : Field or FieldsContainer + Field or fields container with only one field + is expected + fieldA2 : Field or FieldsContainer + Field or fields container with only one field + is expected + fieldB1 : Field or FieldsContainer + Field or fields container with only one field + is expected + fieldB2 : Field or FieldsContainer + Field or fields container with only one field + is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.min_max.min_by_component() + + >>> # Make input connections + >>> my_use_absolute_value = bool() + >>> op.inputs.use_absolute_value.connect(my_use_absolute_value) + >>> my_fieldA1 = dpf.Field() + >>> op.inputs.fieldA1.connect(my_fieldA1) + >>> my_fieldA2 = dpf.Field() + >>> op.inputs.fieldA2.connect(my_fieldA2) + >>> my_fieldB1 = dpf.Field() + >>> op.inputs.fieldB1.connect(my_fieldB1) + >>> my_fieldB2 = dpf.Field() + >>> op.inputs.fieldB2.connect(my_fieldB2) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.min_max.min_by_component( + ... use_absolute_value=my_use_absolute_value, + ... fieldA1=my_fieldA1, + ... fieldA2=my_fieldA2, + ... fieldB1=my_fieldB1, + ... fieldB2=my_fieldB2, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__( + self, + use_absolute_value=None, + fieldA1=None, + fieldA2=None, + fieldB1=None, + fieldB2=None, + config=None, + server=None, + ): + super().__init__(name="min_by_component", config=config, server=server) self._inputs = InputsMinByComponent(self) self._outputs = OutputsMinByComponent(self) - if use_absolute_value !=None: + if use_absolute_value is not None: self.inputs.use_absolute_value.connect(use_absolute_value) - if fieldA1 !=None: + if fieldA1 is not None: self.inputs.fieldA1.connect(fieldA1) - if fieldA2 !=None: + if fieldA2 is not None: self.inputs.fieldA2.connect(fieldA2) - if fieldB2 !=None: + if fieldB1 is not None: + self.inputs.fieldB1.connect(fieldB1) + if fieldB2 is not None: self.inputs.fieldB2.connect(fieldB2) @staticmethod def _spec(): - spec = Specification(description="""Give the maximum for each element rank by comparing several fields.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "use_absolute_value", type_names=["bool"], optional=False, document="""use_absolute_value"""), - 1 : PinSpecification(name = "fieldA", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 2 : PinSpecification(name = "fieldA", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 3 : PinSpecification(name = "fieldB", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = ( + """Give the minimum for each element rank by comparing several fields.""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="use_absolute_value", + type_names=["bool"], 
+ optional=False, + document="""Use_absolute_value""", + ), + 1: PinSpecification( + name="fieldA", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 2: PinSpecification( + name="fieldA", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 3: PinSpecification( + name="fieldB", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 4: PinSpecification( + name="fieldB", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "min_by_component") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="min_by_component", server=server) @property def inputs(self): @@ -80,171 +163,191 @@ def inputs(self): Returns -------- - inputs : InputsMinByComponent + inputs : InputsMinByComponent """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMinByComponent + outputs : OutputsMinByComponent """ return super().outputs -#internal name: min_by_component -#scripting name: min_by_component class InputsMinByComponent(_Inputs): - """Intermediate class used to connect user inputs to min_by_component operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.min_by_component() - >>> my_use_absolute_value = bool() - >>> op.inputs.use_absolute_value.connect(my_use_absolute_value) - >>> my_fieldA1 = dpf.Field() - >>> op.inputs.fieldA1.connect(my_fieldA1) - >>> my_fieldA2 = dpf.Field() - >>> op.inputs.fieldA2.connect(my_fieldA2) - >>> my_fieldB2 = dpf.Field() - >>> op.inputs.fieldB2.connect(my_fieldB2) + """Intermediate class used to connect user inputs to + min_by_component operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.min_by_component() + >>> my_use_absolute_value = bool() + >>> op.inputs.use_absolute_value.connect(my_use_absolute_value) + >>> my_fieldA1 = dpf.Field() + >>> op.inputs.fieldA1.connect(my_fieldA1) + >>> my_fieldA2 = dpf.Field() + >>> op.inputs.fieldA2.connect(my_fieldA2) + >>> my_fieldB1 = dpf.Field() + >>> op.inputs.fieldB1.connect(my_fieldB1) + >>> my_fieldB2 = dpf.Field() + >>> op.inputs.fieldB2.connect(my_fieldB2) """ + def __init__(self, op: Operator): super().__init__(min_by_component._spec().inputs, op) - self._use_absolute_value = Input(min_by_component._spec().input_pin(0), 0, op, -1) + self._use_absolute_value = Input( + min_by_component._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._use_absolute_value) - self._fieldA1 = Input(min_by_component._spec().input_pin(1), 1, op, 0) + self._fieldA1 = Input(min_by_component._spec().input_pin(1), 1, op, 0) self._inputs.append(self._fieldA1) - self._fieldA2 = Input(min_by_component._spec().input_pin(2), 2, op, 1) + self._fieldA2 = Input(min_by_component._spec().input_pin(2), 2, op, 1) self._inputs.append(self._fieldA2) - self._fieldB2 = Input(min_by_component._spec().input_pin(3), 3, op, 1) + self._fieldB1 = Input(min_by_component._spec().input_pin(3), 3, op, 0) + self._inputs.append(self._fieldB1) + self._fieldB2 = Input(min_by_component._spec().input_pin(4), 4, op, 1) self._inputs.append(self._fieldB2) @property def use_absolute_value(self): - """Allows to connect use_absolute_value input to the operator + """Allows to connect use_absolute_value input to the operator. 
- - pindoc: use_absolute_value + Use_absolute_value Parameters ---------- - my_use_absolute_value : bool, + my_use_absolute_value : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_by_component() >>> op.inputs.use_absolute_value.connect(my_use_absolute_value) - >>> #or + >>> # or >>> op.inputs.use_absolute_value(my_use_absolute_value) - """ return self._use_absolute_value @property def fieldA1(self): - """Allows to connect fieldA1 input to the operator + """Allows to connect fieldA1 input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldA1 : Field, FieldsContainer, + my_fieldA1 : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_by_component() >>> op.inputs.fieldA1.connect(my_fieldA1) - >>> #or + >>> # or >>> op.inputs.fieldA1(my_fieldA1) - """ return self._fieldA1 @property def fieldA2(self): - """Allows to connect fieldA2 input to the operator + """Allows to connect fieldA2 input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldA2 : Field, FieldsContainer, + my_fieldA2 : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_by_component() >>> op.inputs.fieldA2.connect(my_fieldA2) - >>> #or + >>> # or >>> op.inputs.fieldA2(my_fieldA2) - """ return self._fieldA2 @property - def fieldB2(self): - """Allows to connect fieldB2 input to the operator + def fieldB1(self): + """Allows to connect fieldB1 input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fieldB2 : Field, FieldsContainer, + my_fieldB1 : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.min_by_component() + >>> op.inputs.fieldB1.connect(my_fieldB1) + >>> # or + >>> op.inputs.fieldB1(my_fieldB1) + """ + return self._fieldB1 + + @property + def fieldB2(self): + """Allows to connect fieldB2 input to the operator. + + Field or fields container with only one field + is expected + Parameters + ---------- + my_fieldB2 : Field or FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf >>> op = dpf.operators.min_max.min_by_component() >>> op.inputs.fieldB2.connect(my_fieldB2) - >>> #or + >>> # or >>> op.inputs.fieldB2(my_fieldB2) - """ return self._fieldB2 + class OutputsMinByComponent(_Outputs): - """Intermediate class used to get outputs from min_by_component operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.min_by_component() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + min_by_component operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.min_by_component() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(min_by_component._spec().outputs, op) - self._field = Output(min_by_component._spec().output_pin(0), 0, op) + self._field = Output(min_by_component._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_by_component() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/min_max/min_max.py b/ansys/dpf/core/operators/min_max/min_max.py index 75e5001348e..9a9d38ae648 100644 --- a/ansys/dpf/core/operators/min_max/min_max.py +++ b/ansys/dpf/core/operators/min_max/min_max.py @@ -1,63 +1,101 @@ """ min_max -======= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "min_max" category -""" class min_max(Operator): - """Compute the component-wise minimum (out 0) and maximum (out 1) over a field. + """Compute the component-wise minimum (out 0) and maximum (out 1) over a + field. 
+ + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected - available inputs: - - field (Field, FieldsContainer) - available outputs: - - field_min (Field) - - field_max (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.min_max.min_max() - >>> # Instantiate operator - >>> op = dpf.operators.min_max.min_max() + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.min_max.min_max( + ... field=my_field, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.min_max.min_max(field=my_field) + >>> # Get output data + >>> result_field_min = op.outputs.field_min() + >>> result_field_max = op.outputs.field_max() + """ - >>> # Get output data - >>> result_field_min = op.outputs.field_min() - >>> result_field_max = op.outputs.field_max()""" def __init__(self, field=None, config=None, server=None): - super().__init__(name="min_max", config = config, server = server) + super().__init__(name="min_max", config=config, server=server) self._inputs = InputsMinMax(self) self._outputs = OutputsMinMax(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) @staticmethod def _spec(): - spec = Specification(description="""Compute the component-wise minimum (out 0) and maximum (out 1) over a field.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field_min", type_names=["field"], optional=False, 
document=""""""), - 1 : PinSpecification(name = "field_max", type_names=["field"], optional=False, document="""""")}) + description = """Compute the component-wise minimum (out 0) and maximum (out 1) over a + field.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field_min", + type_names=["field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="field_max", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "min_max") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="min_max", server=server) @property def inputs(self): @@ -65,115 +103,110 @@ def inputs(self): Returns -------- - inputs : InputsMinMax + inputs : InputsMinMax """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMinMax + outputs : OutputsMinMax """ return super().outputs -#internal name: min_max -#scripting name: min_max class InputsMinMax(_Inputs): - """Intermediate class used to connect user inputs to min_max operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.min_max() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + """Intermediate class used to connect user inputs to + min_max operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.min_max() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) """ + def __init__(self, op: Operator): super().__init__(min_max._spec().inputs, op) - self._field = Input(min_max._spec().input_pin(0), 0, op, -1) + self._field = Input(min_max._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field + class OutputsMinMax(_Outputs): - """Intermediate class used to get outputs from min_max operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.min_max() - >>> # Connect inputs : op.inputs. ... - >>> result_field_min = op.outputs.field_min() - >>> result_field_max = op.outputs.field_max() + """Intermediate class used to get outputs from + min_max operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.min_max() + >>> # Connect inputs : op.inputs. ... + >>> result_field_min = op.outputs.field_min() + >>> result_field_max = op.outputs.field_max() """ + def __init__(self, op: Operator): super().__init__(min_max._spec().outputs, op) - self._field_min = Output(min_max._spec().output_pin(0), 0, op) + self._field_min = Output(min_max._spec().output_pin(0), 0, op) self._outputs.append(self._field_min) - self._field_max = Output(min_max._spec().output_pin(1), 1, op) + self._field_max = Output(min_max._spec().output_pin(1), 1, op) self._outputs.append(self._field_max) @property def field_min(self): """Allows to get field_min output of the operator - Returns ---------- - my_field_min : Field, + my_field_min : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max() >>> # Connect inputs : op.inputs. ... 
- >>> result_field_min = op.outputs.field_min() - """ + >>> result_field_min = op.outputs.field_min() + """ # noqa: E501 return self._field_min @property def field_max(self): """Allows to get field_max output of the operator - Returns ---------- - my_field_max : Field, + my_field_max : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max() >>> # Connect inputs : op.inputs. ... - >>> result_field_max = op.outputs.field_max() - """ + >>> result_field_max = op.outputs.field_max() + """ # noqa: E501 return self._field_max - diff --git a/ansys/dpf/core/operators/min_max/min_max_by_entity.py b/ansys/dpf/core/operators/min_max/min_max_by_entity.py index f77a0e5698e..37c71f2c5a2 100644 --- a/ansys/dpf/core/operators/min_max/min_max_by_entity.py +++ b/ansys/dpf/core/operators/min_max/min_max_by_entity.py @@ -1,63 +1,98 @@ """ min_max_by_entity -================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "min_max" category -""" class min_max_by_entity(Operator): - """Compute the entity-wise minimum (out 0) and maximum (out 1) through all fields of a fields container. + """Compute the entity-wise minimum (out 0) and maximum (out 1) through + all fields of a fields container. 
+ + Parameters + ---------- + fields_container : FieldsContainer - available inputs: - - fields_container (FieldsContainer) - available outputs: - - field_min (Field) - - field_max (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.min_max.min_max_by_entity() - >>> # Instantiate operator - >>> op = dpf.operators.min_max.min_max_by_entity() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.min_max.min_max_by_entity( + ... fields_container=my_fields_container, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.min_max.min_max_by_entity(fields_container=my_fields_container) + >>> # Get output data + >>> result_field_min = op.outputs.field_min() + >>> result_field_max = op.outputs.field_max() + """ - >>> # Get output data - >>> result_field_min = op.outputs.field_min() - >>> result_field_max = op.outputs.field_max()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="min_max_by_entity", config = config, server = server) + super().__init__(name="min_max_by_entity", config=config, server=server) self._inputs = InputsMinMaxByEntity(self) self._outputs = OutputsMinMaxByEntity(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Compute the entity-wise minimum (out 0) and maximum (out 1) through all fields of a fields container.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", 
type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field_min", type_names=["field"], optional=False, document=""""""), - 1 : PinSpecification(name = "field_max", type_names=["field"], optional=False, document="""""")}) + description = """Compute the entity-wise minimum (out 0) and maximum (out 1) through + all fields of a fields container.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field_min", + type_names=["field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="field_max", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "min_max_by_entity") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="min_max_by_entity", server=server) @property def inputs(self): @@ -65,113 +100,109 @@ def inputs(self): Returns -------- - inputs : InputsMinMaxByEntity + inputs : InputsMinMaxByEntity """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMinMaxByEntity + outputs : OutputsMinMaxByEntity """ return super().outputs -#internal name: min_max_by_entity -#scripting name: min_max_by_entity class InputsMinMaxByEntity(_Inputs): - """Intermediate class used to connect user inputs to min_max_by_entity operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.min_max_by_entity() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + min_max_by_entity operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.min_max_by_entity() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(min_max_by_entity._spec().inputs, op) - self._fields_container = Input(min_max_by_entity._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + min_max_by_entity._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_by_entity() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsMinMaxByEntity(_Outputs): - """Intermediate class used to get outputs from min_max_by_entity operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.min_max_by_entity() - >>> # Connect inputs : op.inputs. ... - >>> result_field_min = op.outputs.field_min() - >>> result_field_max = op.outputs.field_max() + """Intermediate class used to get outputs from + min_max_by_entity operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.min_max_by_entity() + >>> # Connect inputs : op.inputs. ... + >>> result_field_min = op.outputs.field_min() + >>> result_field_max = op.outputs.field_max() """ + def __init__(self, op: Operator): super().__init__(min_max_by_entity._spec().outputs, op) - self._field_min = Output(min_max_by_entity._spec().output_pin(0), 0, op) + self._field_min = Output(min_max_by_entity._spec().output_pin(0), 0, op) self._outputs.append(self._field_min) - self._field_max = Output(min_max_by_entity._spec().output_pin(1), 1, op) + self._field_max = Output(min_max_by_entity._spec().output_pin(1), 1, op) self._outputs.append(self._field_max) @property def field_min(self): """Allows to get field_min output of the operator - Returns ---------- - my_field_min : Field, + my_field_min : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_by_entity() >>> # Connect inputs : op.inputs. ... 
- >>> result_field_min = op.outputs.field_min() - """ + >>> result_field_min = op.outputs.field_min() + """ # noqa: E501 return self._field_min @property def field_max(self): """Allows to get field_max output of the operator - Returns ---------- - my_field_max : Field, + my_field_max : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_by_entity() >>> # Connect inputs : op.inputs. ... - >>> result_field_max = op.outputs.field_max() - """ + >>> result_field_max = op.outputs.field_max() + """ # noqa: E501 return self._field_max - diff --git a/ansys/dpf/core/operators/min_max/min_max_by_time.py b/ansys/dpf/core/operators/min_max/min_max_by_time.py index e1f83f6d51d..05d1ab9502f 100644 --- a/ansys/dpf/core/operators/min_max/min_max_by_time.py +++ b/ansys/dpf/core/operators/min_max/min_max_by_time.py @@ -1,63 +1,98 @@ """ min_max_by_time =============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "min_max" category -""" class min_max_by_time(Operator): - """Evaluates minimum, maximum by time or frequency over all the entities of each field + """Evaluates minimum, maximum by time or frequency over all the entities + of each field + + Parameters + ---------- + fields_container : FieldsContainer - available inputs: - - fields_container (FieldsContainer) - available outputs: - - min (FieldsContainer) - - max (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.min_max.min_max_by_time() - >>> # 
Instantiate operator - >>> op = dpf.operators.min_max.min_max_by_time() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.min_max.min_max_by_time( + ... fields_container=my_fields_container, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.min_max.min_max_by_time(fields_container=my_fields_container) + >>> # Get output data + >>> result_min = op.outputs.min() + >>> result_max = op.outputs.max() + """ - >>> # Get output data - >>> result_min = op.outputs.min() - >>> result_max = op.outputs.max()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="min_max_by_time", config = config, server = server) + super().__init__(name="min_max_by_time", config=config, server=server) self._inputs = InputsMinMaxByTime(self) self._outputs = OutputsMinMaxByTime(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Evaluates minimum, maximum by time or frequency over all the entities of each field""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "min", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "max", type_names=["fields_container"], optional=False, document="""""")}) + description = """Evaluates minimum, maximum by time or frequency over all the entities + of each field""" + spec = Specification( + description=description, + map_input_pin_spec={ + 
0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="min", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="max", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "min_max_by_time") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="min_max_by_time", server=server) @property def inputs(self): @@ -65,113 +100,107 @@ def inputs(self): Returns -------- - inputs : InputsMinMaxByTime + inputs : InputsMinMaxByTime """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMinMaxByTime + outputs : OutputsMinMaxByTime """ return super().outputs -#internal name: min_max_by_time -#scripting name: min_max_by_time class InputsMinMaxByTime(_Inputs): - """Intermediate class used to connect user inputs to min_max_by_time operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.min_max_by_time() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + min_max_by_time operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.min_max_by_time() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(min_max_by_time._spec().inputs, op) - self._fields_container = Input(min_max_by_time._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(min_max_by_time._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_by_time() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsMinMaxByTime(_Outputs): - """Intermediate class used to get outputs from min_max_by_time operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.min_max_by_time() - >>> # Connect inputs : op.inputs. ... - >>> result_min = op.outputs.min() - >>> result_max = op.outputs.max() + """Intermediate class used to get outputs from + min_max_by_time operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.min_max_by_time() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_min = op.outputs.min() + >>> result_max = op.outputs.max() """ + def __init__(self, op: Operator): super().__init__(min_max_by_time._spec().outputs, op) - self._min = Output(min_max_by_time._spec().output_pin(0), 0, op) + self._min = Output(min_max_by_time._spec().output_pin(0), 0, op) self._outputs.append(self._min) - self._max = Output(min_max_by_time._spec().output_pin(1), 1, op) + self._max = Output(min_max_by_time._spec().output_pin(1), 1, op) self._outputs.append(self._max) @property def min(self): """Allows to get min output of the operator - Returns ---------- - my_min : FieldsContainer, + my_min : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_by_time() >>> # Connect inputs : op.inputs. ... - >>> result_min = op.outputs.min() - """ + >>> result_min = op.outputs.min() + """ # noqa: E501 return self._min @property def max(self): """Allows to get max output of the operator - Returns ---------- - my_max : FieldsContainer, + my_max : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_by_time() >>> # Connect inputs : op.inputs. ... - >>> result_max = op.outputs.max() - """ + >>> result_max = op.outputs.max() + """ # noqa: E501 return self._max - diff --git a/ansys/dpf/core/operators/min_max/min_max_fc.py b/ansys/dpf/core/operators/min_max/min_max_fc.py index e8621239479..7b93de6c8a0 100644 --- a/ansys/dpf/core/operators/min_max/min_max_fc.py +++ b/ansys/dpf/core/operators/min_max/min_max_fc.py @@ -1,63 +1,98 @@ """ min_max_fc -========== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "min_max" category -""" class min_max_fc(Operator): - """Compute the component-wise minimum (out 0) and maximum (out 1) over a fields container. + """Compute the component-wise minimum (out 0) and maximum (out 1) over a + fields container. + + Parameters + ---------- + fields_container : FieldsContainer - available inputs: - - fields_container (FieldsContainer) - available outputs: - - field_min (Field) - - field_max (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.min_max.min_max_fc() - >>> # Instantiate operator - >>> op = dpf.operators.min_max.min_max_fc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.min_max.min_max_fc( + ... fields_container=my_fields_container, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.min_max.min_max_fc(fields_container=my_fields_container) + >>> # Get output data + >>> result_field_min = op.outputs.field_min() + >>> result_field_max = op.outputs.field_max() + """ - >>> # Get output data - >>> result_field_min = op.outputs.field_min() - >>> result_field_max = op.outputs.field_max()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="min_max_fc", config = config, server = server) + super().__init__(name="min_max_fc", config=config, server=server) self._inputs = InputsMinMaxFc(self) self._outputs = OutputsMinMaxFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Compute the component-wise minimum (out 0) and maximum (out 1) over a fields container.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field_min", type_names=["field"], optional=False, document=""""""), - 1 : PinSpecification(name = "field_max", type_names=["field"], optional=False, document="""""")}) + description = """Compute the component-wise minimum (out 0) and maximum (out 1) over a + fields container.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field_min", + type_names=["field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="field_max", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "min_max_fc") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="min_max_fc", server=server) @property def inputs(self): @@ -65,113 +100,107 @@ def inputs(self): Returns -------- - inputs : InputsMinMaxFc + inputs : InputsMinMaxFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMinMaxFc + outputs : OutputsMinMaxFc """ return super().outputs -#internal name: min_max_fc -#scripting name: min_max_fc class InputsMinMaxFc(_Inputs): - """Intermediate class used to connect user inputs to min_max_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.min_max_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + min_max_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.min_max_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(min_max_fc._spec().inputs, op) - self._fields_container = Input(min_max_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(min_max_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsMinMaxFc(_Outputs): - """Intermediate class used to get outputs from min_max_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.min_max_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_field_min = op.outputs.field_min() - >>> result_field_max = op.outputs.field_max() + """Intermediate class used to get outputs from + min_max_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.min_max_fc() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field_min = op.outputs.field_min() + >>> result_field_max = op.outputs.field_max() """ + def __init__(self, op: Operator): super().__init__(min_max_fc._spec().outputs, op) - self._field_min = Output(min_max_fc._spec().output_pin(0), 0, op) + self._field_min = Output(min_max_fc._spec().output_pin(0), 0, op) self._outputs.append(self._field_min) - self._field_max = Output(min_max_fc._spec().output_pin(1), 1, op) + self._field_max = Output(min_max_fc._spec().output_pin(1), 1, op) self._outputs.append(self._field_max) @property def field_min(self): """Allows to get field_min output of the operator - Returns ---------- - my_field_min : Field, + my_field_min : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_fc() >>> # Connect inputs : op.inputs. ... - >>> result_field_min = op.outputs.field_min() - """ + >>> result_field_min = op.outputs.field_min() + """ # noqa: E501 return self._field_min @property def field_max(self): """Allows to get field_max output of the operator - Returns ---------- - my_field_max : Field, + my_field_max : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_fc() >>> # Connect inputs : op.inputs. ... - >>> result_field_max = op.outputs.field_max() - """ + >>> result_field_max = op.outputs.field_max() + """ # noqa: E501 return self._field_max - diff --git a/ansys/dpf/core/operators/min_max/min_max_fc_inc.py b/ansys/dpf/core/operators/min_max/min_max_fc_inc.py index 3e638d1e523..dd2f30a6a04 100644 --- a/ansys/dpf/core/operators/min_max/min_max_fc_inc.py +++ b/ansys/dpf/core/operators/min_max/min_max_fc_inc.py @@ -1,63 +1,98 @@ """ min_max_fc_inc -============== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "min_max" category -""" class min_max_fc_inc(Operator): - """Compute the component-wise minimum (out 0) and maximum (out 1) over a fields container. + """Compute the component-wise minimum (out 0) and maximum (out 1) over a + fields container. + + Parameters + ---------- + fields_container : FieldsContainer - available inputs: - - fields_container (FieldsContainer) - available outputs: - - field_min (Field) - - field_max (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.min_max.min_max_fc_inc() - >>> # Instantiate operator - >>> op = dpf.operators.min_max.min_max_fc_inc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.min_max.min_max_fc_inc( + ... fields_container=my_fields_container, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.min_max.min_max_fc_inc(fields_container=my_fields_container) + >>> # Get output data + >>> result_field_min = op.outputs.field_min() + >>> result_field_max = op.outputs.field_max() + """ - >>> # Get output data - >>> result_field_min = op.outputs.field_min() - >>> result_field_max = op.outputs.field_max()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="min_max_fc_inc", config = config, server = server) + super().__init__(name="min_max_fc_inc", config=config, server=server) self._inputs = InputsMinMaxFcInc(self) self._outputs = OutputsMinMaxFcInc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Compute the component-wise minimum (out 0) and maximum (out 1) over a fields container.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field_min", type_names=["field"], optional=False, document=""""""), - 1 : PinSpecification(name = "field_max", type_names=["field"], optional=False, document="""""")}) + description = """Compute the component-wise minimum (out 0) and maximum (out 1) over a + fields container.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field_min", + type_names=["field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="field_max", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = 
"min_max_fc_inc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="min_max_fc_inc", server=server) @property def inputs(self): @@ -65,113 +100,107 @@ def inputs(self): Returns -------- - inputs : InputsMinMaxFcInc + inputs : InputsMinMaxFcInc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMinMaxFcInc + outputs : OutputsMinMaxFcInc """ return super().outputs -#internal name: min_max_fc_inc -#scripting name: min_max_fc_inc class InputsMinMaxFcInc(_Inputs): - """Intermediate class used to connect user inputs to min_max_fc_inc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.min_max_fc_inc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + min_max_fc_inc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.min_max_fc_inc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(min_max_fc_inc._spec().inputs, op) - self._fields_container = Input(min_max_fc_inc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(min_max_fc_inc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_fc_inc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsMinMaxFcInc(_Outputs): - """Intermediate class used to get outputs from min_max_fc_inc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.min_max_fc_inc() - >>> # Connect inputs : op.inputs. ... - >>> result_field_min = op.outputs.field_min() - >>> result_field_max = op.outputs.field_max() + """Intermediate class used to get outputs from + min_max_fc_inc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.min_max_fc_inc() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field_min = op.outputs.field_min() + >>> result_field_max = op.outputs.field_max() """ + def __init__(self, op: Operator): super().__init__(min_max_fc_inc._spec().outputs, op) - self._field_min = Output(min_max_fc_inc._spec().output_pin(0), 0, op) + self._field_min = Output(min_max_fc_inc._spec().output_pin(0), 0, op) self._outputs.append(self._field_min) - self._field_max = Output(min_max_fc_inc._spec().output_pin(1), 1, op) + self._field_max = Output(min_max_fc_inc._spec().output_pin(1), 1, op) self._outputs.append(self._field_max) @property def field_min(self): """Allows to get field_min output of the operator - Returns ---------- - my_field_min : Field, + my_field_min : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_fc_inc() >>> # Connect inputs : op.inputs. ... - >>> result_field_min = op.outputs.field_min() - """ + >>> result_field_min = op.outputs.field_min() + """ # noqa: E501 return self._field_min @property def field_max(self): """Allows to get field_max output of the operator - Returns ---------- - my_field_max : Field, + my_field_max : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_fc_inc() >>> # Connect inputs : op.inputs. ... - >>> result_field_max = op.outputs.field_max() - """ + >>> result_field_max = op.outputs.field_max() + """ # noqa: E501 return self._field_max - diff --git a/ansys/dpf/core/operators/min_max/min_max_inc.py b/ansys/dpf/core/operators/min_max/min_max_inc.py index eaeaf6d984d..0d5436b1285 100644 --- a/ansys/dpf/core/operators/min_max/min_max_inc.py +++ b/ansys/dpf/core/operators/min_max/min_max_inc.py @@ -1,75 +1,124 @@ """ min_max_inc -=========== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "min_max" category -""" class min_max_inc(Operator): - """Compute the component-wise minimum (out 0) and maximum (out 1) over coming fields. - - available inputs: - - field (Field) - - domain_id (int) (optional) - - available outputs: - - field_min (Field) - - field_max (Field) - - domain_ids_min (Scoping) - - domain_ids_max (Scoping) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.min_max.min_max_inc() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_domain_id = int() - >>> op.inputs.domain_id.connect(my_domain_id) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.min_max.min_max_inc(field=my_field,domain_id=my_domain_id) - - >>> # Get output data - >>> result_field_min = op.outputs.field_min() - >>> result_field_max = op.outputs.field_max() - >>> result_domain_ids_min = op.outputs.domain_ids_min() - >>> result_domain_ids_max = op.outputs.domain_ids_max()""" + """Compute the component-wise minimum (out 0) and maximum (out 1) over + coming fields. 
+ + Parameters + ---------- + field : Field + domain_id : int, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.min_max.min_max_inc() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_domain_id = int() + >>> op.inputs.domain_id.connect(my_domain_id) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.min_max.min_max_inc( + ... field=my_field, + ... domain_id=my_domain_id, + ... ) + + >>> # Get output data + >>> result_field_min = op.outputs.field_min() + >>> result_field_max = op.outputs.field_max() + >>> result_domain_ids_min = op.outputs.domain_ids_min() + >>> result_domain_ids_max = op.outputs.domain_ids_max() + """ + def __init__(self, field=None, domain_id=None, config=None, server=None): - super().__init__(name="min_max_inc", config = config, server = server) + super().__init__(name="min_max_inc", config=config, server=server) self._inputs = InputsMinMaxInc(self) self._outputs = OutputsMinMaxInc(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if domain_id !=None: + if domain_id is not None: self.inputs.domain_id.connect(domain_id) @staticmethod def _spec(): - spec = Specification(description="""Compute the component-wise minimum (out 0) and maximum (out 1) over coming fields.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document=""""""), - 17 : PinSpecification(name = "domain_id", type_names=["int32"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field_min", type_names=["field"], optional=False, document=""""""), - 1 : PinSpecification(name = "field_max", type_names=["field"], optional=False, document=""""""), - 2 : PinSpecification(name = "domain_ids_min", type_names=["scoping"], optional=False, document=""""""), - 3 : PinSpecification(name = 
"domain_ids_max", type_names=["scoping"], optional=False, document="""""")}) + description = """Compute the component-wise minimum (out 0) and maximum (out 1) over + coming fields.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + 17: PinSpecification( + name="domain_id", + type_names=["int32"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field_min", + type_names=["field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="field_max", + type_names=["field"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="domain_ids_min", + type_names=["scoping"], + optional=False, + document="""""", + ), + 3: PinSpecification( + name="domain_ids_max", + type_names=["scoping"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "min_max_inc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="min_max_inc", server=server) @property def inputs(self): @@ -77,181 +126,169 @@ def inputs(self): Returns -------- - inputs : InputsMinMaxInc + inputs : InputsMinMaxInc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMinMaxInc + outputs : OutputsMinMaxInc """ return super().outputs -#internal name: min_max_inc -#scripting name: min_max_inc class InputsMinMaxInc(_Inputs): - """Intermediate class used to connect user inputs to min_max_inc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.min_max_inc() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_domain_id = int() - >>> op.inputs.domain_id.connect(my_domain_id) + """Intermediate class used to connect user inputs to + min_max_inc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.min_max_inc() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_domain_id = int() + >>> op.inputs.domain_id.connect(my_domain_id) """ + def __init__(self, op: Operator): super().__init__(min_max_inc._spec().inputs, op) - self._field = Input(min_max_inc._spec().input_pin(0), 0, op, -1) + self._field = Input(min_max_inc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._domain_id = Input(min_max_inc._spec().input_pin(17), 17, op, -1) + self._domain_id = Input(min_max_inc._spec().input_pin(17), 17, op, -1) self._inputs.append(self._domain_id) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. 
Parameters ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_inc() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def domain_id(self): - """Allows to connect domain_id input to the operator + """Allows to connect domain_id input to the operator. Parameters ---------- - my_domain_id : int, + my_domain_id : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_inc() >>> op.inputs.domain_id.connect(my_domain_id) - >>> #or + >>> # or >>> op.inputs.domain_id(my_domain_id) - """ return self._domain_id + class OutputsMinMaxInc(_Outputs): - """Intermediate class used to get outputs from min_max_inc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.min_max_inc() - >>> # Connect inputs : op.inputs. ... - >>> result_field_min = op.outputs.field_min() - >>> result_field_max = op.outputs.field_max() - >>> result_domain_ids_min = op.outputs.domain_ids_min() - >>> result_domain_ids_max = op.outputs.domain_ids_max() + """Intermediate class used to get outputs from + min_max_inc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.min_max_inc() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field_min = op.outputs.field_min() + >>> result_field_max = op.outputs.field_max() + >>> result_domain_ids_min = op.outputs.domain_ids_min() + >>> result_domain_ids_max = op.outputs.domain_ids_max() """ + def __init__(self, op: Operator): super().__init__(min_max_inc._spec().outputs, op) - self._field_min = Output(min_max_inc._spec().output_pin(0), 0, op) + self._field_min = Output(min_max_inc._spec().output_pin(0), 0, op) self._outputs.append(self._field_min) - self._field_max = Output(min_max_inc._spec().output_pin(1), 1, op) + self._field_max = Output(min_max_inc._spec().output_pin(1), 1, op) self._outputs.append(self._field_max) - self._domain_ids_min = Output(min_max_inc._spec().output_pin(2), 2, op) + self._domain_ids_min = Output(min_max_inc._spec().output_pin(2), 2, op) self._outputs.append(self._domain_ids_min) - self._domain_ids_max = Output(min_max_inc._spec().output_pin(3), 3, op) + self._domain_ids_max = Output(min_max_inc._spec().output_pin(3), 3, op) self._outputs.append(self._domain_ids_max) @property def field_min(self): """Allows to get field_min output of the operator - Returns ---------- - my_field_min : Field, + my_field_min : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_inc() >>> # Connect inputs : op.inputs. ... - >>> result_field_min = op.outputs.field_min() - """ + >>> result_field_min = op.outputs.field_min() + """ # noqa: E501 return self._field_min @property def field_max(self): """Allows to get field_max output of the operator - Returns ---------- - my_field_max : Field, + my_field_max : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_inc() >>> # Connect inputs : op.inputs. ... 
- >>> result_field_max = op.outputs.field_max() - """ + >>> result_field_max = op.outputs.field_max() + """ # noqa: E501 return self._field_max @property def domain_ids_min(self): """Allows to get domain_ids_min output of the operator - Returns ---------- - my_domain_ids_min : Scoping, + my_domain_ids_min : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_inc() >>> # Connect inputs : op.inputs. ... - >>> result_domain_ids_min = op.outputs.domain_ids_min() - """ + >>> result_domain_ids_min = op.outputs.domain_ids_min() + """ # noqa: E501 return self._domain_ids_min @property def domain_ids_max(self): """Allows to get domain_ids_max output of the operator - Returns ---------- - my_domain_ids_max : Scoping, + my_domain_ids_max : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_inc() >>> # Connect inputs : op.inputs. ... - >>> result_domain_ids_max = op.outputs.domain_ids_max() - """ + >>> result_domain_ids_max = op.outputs.domain_ids_max() + """ # noqa: E501 return self._domain_ids_max - diff --git a/ansys/dpf/core/operators/min_max/min_max_over_label_fc.py b/ansys/dpf/core/operators/min_max/min_max_over_label_fc.py index 352766a2016..a2ef668abf3 100644 --- a/ansys/dpf/core/operators/min_max/min_max_over_label_fc.py +++ b/ansys/dpf/core/operators/min_max/min_max_over_label_fc.py @@ -1,81 +1,157 @@ """ min_max_over_label_fc -===================== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "min_max" category -""" class min_max_over_label_fc(Operator): - """Create two fields (0 min 1 max) by looping over the fields container in input and taking the min/max value by component through all the fields having the same id for the label set in input (in pin 1). If no label is specified or if the specified label doesn't exist, the operation is done over all the fields. The fields out are located on the label set in input, so their scoping are the labels ids.For each min max value, the label id for one other fields container labels is kept and returned in a scoping in pin 2 (min) and 3 (max).The field's scoping ids of the value kept in min max are also returned in the scopings in pin 4 (min) and 5 (max). 
- - available inputs: - - fields_container (FieldsContainer) - - label (str) - - available outputs: - - field_min (Field) - - field_max (Field) - - domain_ids_min (Scoping) - - domain_ids_max (Scoping) - - scoping_ids_min (Scoping) - - scoping_ids_max (Scoping) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.min_max.min_max_over_label_fc() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_label = str() - >>> op.inputs.label.connect(my_label) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.min_max.min_max_over_label_fc(fields_container=my_fields_container,label=my_label) - - >>> # Get output data - >>> result_field_min = op.outputs.field_min() - >>> result_field_max = op.outputs.field_max() - >>> result_domain_ids_min = op.outputs.domain_ids_min() - >>> result_domain_ids_max = op.outputs.domain_ids_max() - >>> result_scoping_ids_min = op.outputs.scoping_ids_min() - >>> result_scoping_ids_max = op.outputs.scoping_ids_max()""" + """Create two fields (0 min 1 max) by looping over the fields container + in input and taking the min/max value by component through all the + fields having the same id for the label set in input (in pin 1). + If no label is specified or if the specified label doesn't exist, + the operation is done over all the fields. The fields out are + located on the label set in input, so their scoping are the labels + ids.For each min max value, the label id for one other fields + container labels is kept and returned in a scoping in pin 2 (min) + and 3 (max).The field's scoping ids of the value kept in min max + are also returned in the scopings in pin 4 (min) and 5 (max). 
+ + Parameters + ---------- + fields_container : FieldsContainer + label : str + Label name from the fields container + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.min_max.min_max_over_label_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_label = str() + >>> op.inputs.label.connect(my_label) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.min_max.min_max_over_label_fc( + ... fields_container=my_fields_container, + ... label=my_label, + ... ) + + >>> # Get output data + >>> result_field_min = op.outputs.field_min() + >>> result_field_max = op.outputs.field_max() + >>> result_domain_ids_min = op.outputs.domain_ids_min() + >>> result_domain_ids_max = op.outputs.domain_ids_max() + >>> result_scoping_ids_min = op.outputs.scoping_ids_min() + >>> result_scoping_ids_max = op.outputs.scoping_ids_max() + """ + def __init__(self, fields_container=None, label=None, config=None, server=None): - super().__init__(name="min_max_over_label_fc", config = config, server = server) + super().__init__(name="min_max_over_label_fc", config=config, server=server) self._inputs = InputsMinMaxOverLabelFc(self) self._outputs = OutputsMinMaxOverLabelFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if label !=None: + if label is not None: self.inputs.label.connect(label) @staticmethod def _spec(): - spec = Specification(description="""Create two fields (0 min 1 max) by looping over the fields container in input and taking the min/max value by component through all the fields having the same id for the label set in input (in pin 1). If no label is specified or if the specified label doesn't exist, the operation is done over all the fields. 
The fields out are located on the label set in input, so their scoping are the labels ids.For each min max value, the label id for one other fields container labels is kept and returned in a scoping in pin 2 (min) and 3 (max).The field's scoping ids of the value kept in min max are also returned in the scopings in pin 4 (min) and 5 (max).""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "label", type_names=["string"], optional=False, document="""label name from the fields container""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field_min", type_names=["field"], optional=False, document=""""""), - 1 : PinSpecification(name = "field_max", type_names=["field"], optional=False, document=""""""), - 2 : PinSpecification(name = "domain_ids_min", type_names=["scoping"], optional=True, document=""""""), - 3 : PinSpecification(name = "domain_ids_max", type_names=["scoping"], optional=True, document=""""""), - 4 : PinSpecification(name = "scoping_ids_min", type_names=["scoping"], optional=False, document=""""""), - 5 : PinSpecification(name = "scoping_ids_max", type_names=["scoping"], optional=False, document="""""")}) + description = """Create two fields (0 min 1 max) by looping over the fields container + in input and taking the min/max value by component through + all the fields having the same id for the label set in + input (in pin 1). If no label is specified or if the + specified label doesn't exist, the operation is done over + all the fields. 
The fields out are located on the label + set in input, so their scoping are the labels ids.For each + min max value, the label id for one other fields container + labels is kept and returned in a scoping in pin 2 (min) + and 3 (max).The field's scoping ids of the value kept in + min max are also returned in the scopings in pin 4 (min) + and 5 (max).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="label", + type_names=["string"], + optional=False, + document="""Label name from the fields container""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field_min", + type_names=["field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="field_max", + type_names=["field"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="domain_ids_min", + type_names=["scoping"], + optional=True, + document="""""", + ), + 3: PinSpecification( + name="domain_ids_max", + type_names=["scoping"], + optional=True, + document="""""", + ), + 4: PinSpecification( + name="scoping_ids_min", + type_names=["scoping"], + optional=False, + document="""""", + ), + 5: PinSpecification( + name="scoping_ids_max", + type_names=["scoping"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "min_max_over_label_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="min_max_over_label_fc", server=server) @property def inputs(self): @@ -83,227 +159,221 @@ def inputs(self): Returns -------- - inputs : InputsMinMaxOverLabelFc + inputs : InputsMinMaxOverLabelFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMinMaxOverLabelFc + outputs : OutputsMinMaxOverLabelFc """ return super().outputs -#internal name: min_max_over_label_fc -#scripting name: min_max_over_label_fc class InputsMinMaxOverLabelFc(_Inputs): - """Intermediate class used to connect user inputs to min_max_over_label_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.min_max_over_label_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_label = str() - >>> op.inputs.label.connect(my_label) + """Intermediate class used to connect user inputs to + min_max_over_label_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.min_max_over_label_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_label = str() + >>> op.inputs.label.connect(my_label) """ + def __init__(self, op: Operator): super().__init__(min_max_over_label_fc._spec().inputs, op) - self._fields_container = Input(min_max_over_label_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + min_max_over_label_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._label = Input(min_max_over_label_fc._spec().input_pin(1), 1, op, -1) + self._label = Input(min_max_over_label_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._label) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_over_label_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def label(self): - """Allows to connect label input to the operator + """Allows to connect label input to the operator. 
- - pindoc: label name from the fields container + Label name from the fields container Parameters ---------- - my_label : str, + my_label : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_over_label_fc() >>> op.inputs.label.connect(my_label) - >>> #or + >>> # or >>> op.inputs.label(my_label) - """ return self._label + class OutputsMinMaxOverLabelFc(_Outputs): - """Intermediate class used to get outputs from min_max_over_label_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.min_max_over_label_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_field_min = op.outputs.field_min() - >>> result_field_max = op.outputs.field_max() - >>> result_domain_ids_min = op.outputs.domain_ids_min() - >>> result_domain_ids_max = op.outputs.domain_ids_max() - >>> result_scoping_ids_min = op.outputs.scoping_ids_min() - >>> result_scoping_ids_max = op.outputs.scoping_ids_max() + """Intermediate class used to get outputs from + min_max_over_label_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.min_max_over_label_fc() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field_min = op.outputs.field_min() + >>> result_field_max = op.outputs.field_max() + >>> result_domain_ids_min = op.outputs.domain_ids_min() + >>> result_domain_ids_max = op.outputs.domain_ids_max() + >>> result_scoping_ids_min = op.outputs.scoping_ids_min() + >>> result_scoping_ids_max = op.outputs.scoping_ids_max() """ + def __init__(self, op: Operator): super().__init__(min_max_over_label_fc._spec().outputs, op) - self._field_min = Output(min_max_over_label_fc._spec().output_pin(0), 0, op) + self._field_min = Output(min_max_over_label_fc._spec().output_pin(0), 0, op) self._outputs.append(self._field_min) - self._field_max = Output(min_max_over_label_fc._spec().output_pin(1), 1, op) + self._field_max = Output(min_max_over_label_fc._spec().output_pin(1), 1, op) self._outputs.append(self._field_max) - self._domain_ids_min = Output(min_max_over_label_fc._spec().output_pin(2), 2, op) + self._domain_ids_min = Output( + min_max_over_label_fc._spec().output_pin(2), 2, op + ) self._outputs.append(self._domain_ids_min) - self._domain_ids_max = Output(min_max_over_label_fc._spec().output_pin(3), 3, op) + self._domain_ids_max = Output( + min_max_over_label_fc._spec().output_pin(3), 3, op + ) self._outputs.append(self._domain_ids_max) - self._scoping_ids_min = Output(min_max_over_label_fc._spec().output_pin(4), 4, op) + self._scoping_ids_min = Output( + min_max_over_label_fc._spec().output_pin(4), 4, op + ) self._outputs.append(self._scoping_ids_min) - self._scoping_ids_max = Output(min_max_over_label_fc._spec().output_pin(5), 5, op) + self._scoping_ids_max = Output( + min_max_over_label_fc._spec().output_pin(5), 5, op + ) self._outputs.append(self._scoping_ids_max) @property def field_min(self): """Allows to get field_min output of the operator - Returns ---------- - my_field_min : Field, + my_field_min : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_over_label_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_field_min = op.outputs.field_min() - """ + >>> result_field_min = op.outputs.field_min() + """ # noqa: E501 return self._field_min @property def field_max(self): """Allows to get field_max output of the operator - Returns ---------- - my_field_max : Field, + my_field_max : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_over_label_fc() >>> # Connect inputs : op.inputs. ... - >>> result_field_max = op.outputs.field_max() - """ + >>> result_field_max = op.outputs.field_max() + """ # noqa: E501 return self._field_max @property def domain_ids_min(self): """Allows to get domain_ids_min output of the operator - Returns ---------- - my_domain_ids_min : Scoping, + my_domain_ids_min : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_over_label_fc() >>> # Connect inputs : op.inputs. ... - >>> result_domain_ids_min = op.outputs.domain_ids_min() - """ + >>> result_domain_ids_min = op.outputs.domain_ids_min() + """ # noqa: E501 return self._domain_ids_min @property def domain_ids_max(self): """Allows to get domain_ids_max output of the operator - Returns ---------- - my_domain_ids_max : Scoping, + my_domain_ids_max : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_over_label_fc() >>> # Connect inputs : op.inputs. ... - >>> result_domain_ids_max = op.outputs.domain_ids_max() - """ + >>> result_domain_ids_max = op.outputs.domain_ids_max() + """ # noqa: E501 return self._domain_ids_max @property def scoping_ids_min(self): """Allows to get scoping_ids_min output of the operator - Returns ---------- - my_scoping_ids_min : Scoping, + my_scoping_ids_min : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_over_label_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_scoping_ids_min = op.outputs.scoping_ids_min() - """ + >>> result_scoping_ids_min = op.outputs.scoping_ids_min() + """ # noqa: E501 return self._scoping_ids_min @property def scoping_ids_max(self): """Allows to get scoping_ids_max output of the operator - Returns ---------- - my_scoping_ids_max : Scoping, + my_scoping_ids_max : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_over_label_fc() >>> # Connect inputs : op.inputs. ... - >>> result_scoping_ids_max = op.outputs.scoping_ids_max() - """ + >>> result_scoping_ids_max = op.outputs.scoping_ids_max() + """ # noqa: E501 return self._scoping_ids_max - diff --git a/ansys/dpf/core/operators/min_max/min_max_over_time_by_entity.py b/ansys/dpf/core/operators/min_max/min_max_over_time_by_entity.py index fed5521d252..72967f250f4 100644 --- a/ansys/dpf/core/operators/min_max/min_max_over_time_by_entity.py +++ b/ansys/dpf/core/operators/min_max/min_max_over_time_by_entity.py @@ -1,75 +1,131 @@ """ min_max_over_time_by_entity -=========================== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "min_max" category -""" class min_max_over_time_by_entity(Operator): - """Evaluates minimum, maximum over time/frequency and returns those min max as well as the time/freq where they occured - - available inputs: - - fields_container (FieldsContainer) - - compute_amplitude (bool) (optional) - - available outputs: - - min (FieldsContainer) - - max (FieldsContainer) - - time_freq_of_min (FieldsContainer) - - time_freq_of_max (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.min_max.min_max_over_time_by_entity() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_compute_amplitude = bool() - >>> op.inputs.compute_amplitude.connect(my_compute_amplitude) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.min_max.min_max_over_time_by_entity(fields_container=my_fields_container,compute_amplitude=my_compute_amplitude) - - >>> # Get output data - >>> result_min = op.outputs.min() - >>> result_max = op.outputs.max() - >>> result_time_freq_of_min = op.outputs.time_freq_of_min() - >>> result_time_freq_of_max = op.outputs.time_freq_of_max()""" - def __init__(self, fields_container=None, compute_amplitude=None, config=None, server=None): - super().__init__(name="min_max_over_time_by_entity", config = config, server = server) + """Evaluates minimum, maximum over time/frequency and returns those min + max as well as the time/freq where they occured + + Parameters + 
---------- + fields_container : FieldsContainer + compute_amplitude : bool, optional + Do calculate amplitude. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.min_max.min_max_over_time_by_entity() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_compute_amplitude = bool() + >>> op.inputs.compute_amplitude.connect(my_compute_amplitude) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.min_max.min_max_over_time_by_entity( + ... fields_container=my_fields_container, + ... compute_amplitude=my_compute_amplitude, + ... ) + + >>> # Get output data + >>> result_min = op.outputs.min() + >>> result_max = op.outputs.max() + >>> result_time_freq_of_min = op.outputs.time_freq_of_min() + >>> result_time_freq_of_max = op.outputs.time_freq_of_max() + """ + + def __init__( + self, fields_container=None, compute_amplitude=None, config=None, server=None + ): + super().__init__( + name="min_max_over_time_by_entity", config=config, server=server + ) self._inputs = InputsMinMaxOverTimeByEntity(self) self._outputs = OutputsMinMaxOverTimeByEntity(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if compute_amplitude !=None: + if compute_amplitude is not None: self.inputs.compute_amplitude.connect(compute_amplitude) @staticmethod def _spec(): - spec = Specification(description="""Evaluates minimum, maximum over time/frequency and returns those min max as well as the time/freq where they occured""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 4 : PinSpecification(name = "compute_amplitude", type_names=["bool"], optional=True, document="""Do calculate amplitude.""")}, - map_output_pin_spec={ - 0 : 
PinSpecification(name = "min", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "max", type_names=["fields_container"], optional=False, document=""""""), - 2 : PinSpecification(name = "time_freq_of_min", type_names=["fields_container"], optional=False, document=""""""), - 3 : PinSpecification(name = "time_freq_of_max", type_names=["fields_container"], optional=False, document="""""")}) + description = """Evaluates minimum, maximum over time/frequency and returns those min + max as well as the time/freq where they occured""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 4: PinSpecification( + name="compute_amplitude", + type_names=["bool"], + optional=True, + document="""Do calculate amplitude.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="min", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="max", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="time_freq_of_min", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 3: PinSpecification( + name="time_freq_of_max", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "min_max_over_time_by_entity") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. 
When + ``None``, attempts to use the the global server. + """ + return Operator.default_config( + name="min_max_over_time_by_entity", server=server + ) @property def inputs(self): @@ -77,183 +133,179 @@ def inputs(self): Returns -------- - inputs : InputsMinMaxOverTimeByEntity + inputs : InputsMinMaxOverTimeByEntity """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMinMaxOverTimeByEntity + outputs : OutputsMinMaxOverTimeByEntity """ return super().outputs -#internal name: min_max_over_time_by_entity -#scripting name: min_max_over_time_by_entity class InputsMinMaxOverTimeByEntity(_Inputs): - """Intermediate class used to connect user inputs to min_max_over_time_by_entity operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.min_max_over_time_by_entity() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_compute_amplitude = bool() - >>> op.inputs.compute_amplitude.connect(my_compute_amplitude) + """Intermediate class used to connect user inputs to + min_max_over_time_by_entity operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.min_max_over_time_by_entity() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_compute_amplitude = bool() + >>> op.inputs.compute_amplitude.connect(my_compute_amplitude) """ + def __init__(self, op: Operator): super().__init__(min_max_over_time_by_entity._spec().inputs, op) - self._fields_container = Input(min_max_over_time_by_entity._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + min_max_over_time_by_entity._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._compute_amplitude = Input(min_max_over_time_by_entity._spec().input_pin(4), 4, op, -1) + self._compute_amplitude = Input( + min_max_over_time_by_entity._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._compute_amplitude) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_over_time_by_entity() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def compute_amplitude(self): - """Allows to connect compute_amplitude input to the operator + """Allows to connect compute_amplitude input to the operator. - - pindoc: Do calculate amplitude. + Do calculate amplitude. 
Parameters ---------- - my_compute_amplitude : bool, + my_compute_amplitude : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_over_time_by_entity() >>> op.inputs.compute_amplitude.connect(my_compute_amplitude) - >>> #or + >>> # or >>> op.inputs.compute_amplitude(my_compute_amplitude) - """ return self._compute_amplitude + class OutputsMinMaxOverTimeByEntity(_Outputs): - """Intermediate class used to get outputs from min_max_over_time_by_entity operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.min_max_over_time_by_entity() - >>> # Connect inputs : op.inputs. ... - >>> result_min = op.outputs.min() - >>> result_max = op.outputs.max() - >>> result_time_freq_of_min = op.outputs.time_freq_of_min() - >>> result_time_freq_of_max = op.outputs.time_freq_of_max() + """Intermediate class used to get outputs from + min_max_over_time_by_entity operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.min_max_over_time_by_entity() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_min = op.outputs.min() + >>> result_max = op.outputs.max() + >>> result_time_freq_of_min = op.outputs.time_freq_of_min() + >>> result_time_freq_of_max = op.outputs.time_freq_of_max() """ + def __init__(self, op: Operator): super().__init__(min_max_over_time_by_entity._spec().outputs, op) - self._min = Output(min_max_over_time_by_entity._spec().output_pin(0), 0, op) + self._min = Output(min_max_over_time_by_entity._spec().output_pin(0), 0, op) self._outputs.append(self._min) - self._max = Output(min_max_over_time_by_entity._spec().output_pin(1), 1, op) + self._max = Output(min_max_over_time_by_entity._spec().output_pin(1), 1, op) self._outputs.append(self._max) - self._time_freq_of_min = Output(min_max_over_time_by_entity._spec().output_pin(2), 2, op) + self._time_freq_of_min = Output( + min_max_over_time_by_entity._spec().output_pin(2), 2, op + ) self._outputs.append(self._time_freq_of_min) - self._time_freq_of_max = Output(min_max_over_time_by_entity._spec().output_pin(3), 3, op) + self._time_freq_of_max = Output( + min_max_over_time_by_entity._spec().output_pin(3), 3, op + ) self._outputs.append(self._time_freq_of_max) @property def min(self): """Allows to get min output of the operator - Returns ---------- - my_min : FieldsContainer, + my_min : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_over_time_by_entity() >>> # Connect inputs : op.inputs. ... - >>> result_min = op.outputs.min() - """ + >>> result_min = op.outputs.min() + """ # noqa: E501 return self._min @property def max(self): """Allows to get max output of the operator - Returns ---------- - my_max : FieldsContainer, + my_max : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_over_time_by_entity() >>> # Connect inputs : op.inputs. ... 
- >>> result_max = op.outputs.max() - """ + >>> result_max = op.outputs.max() + """ # noqa: E501 return self._max @property def time_freq_of_min(self): """Allows to get time_freq_of_min output of the operator - Returns ---------- - my_time_freq_of_min : FieldsContainer, + my_time_freq_of_min : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_over_time_by_entity() >>> # Connect inputs : op.inputs. ... - >>> result_time_freq_of_min = op.outputs.time_freq_of_min() - """ + >>> result_time_freq_of_min = op.outputs.time_freq_of_min() + """ # noqa: E501 return self._time_freq_of_min @property def time_freq_of_max(self): """Allows to get time_freq_of_max output of the operator - Returns ---------- - my_time_freq_of_max : FieldsContainer, + my_time_freq_of_max : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_max_over_time_by_entity() >>> # Connect inputs : op.inputs. ... - >>> result_time_freq_of_max = op.outputs.time_freq_of_max() - """ + >>> result_time_freq_of_max = op.outputs.time_freq_of_max() + """ # noqa: E501 return self._time_freq_of_max - diff --git a/ansys/dpf/core/operators/min_max/min_over_time_by_entity.py b/ansys/dpf/core/operators/min_max/min_over_time_by_entity.py index 75e6dd3c797..a1944734ddf 100644 --- a/ansys/dpf/core/operators/min_max/min_over_time_by_entity.py +++ b/ansys/dpf/core/operators/min_max/min_over_time_by_entity.py @@ -1,72 +1,122 @@ """ min_over_time_by_entity -======================= +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "min_max" category -""" class min_over_time_by_entity(Operator): """Evaluates minimum over time/frequency. - available inputs: - - fields_container (FieldsContainer) - - abs_value (bool) (optional) - - compute_amplitude (bool) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.min_max.min_over_time_by_entity() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_abs_value = bool() - >>> op.inputs.abs_value.connect(my_abs_value) - >>> my_compute_amplitude = bool() - >>> op.inputs.compute_amplitude.connect(my_compute_amplitude) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.min_max.min_over_time_by_entity(fields_container=my_fields_container,abs_value=my_abs_value,compute_amplitude=my_compute_amplitude) + Parameters + ---------- + fields_container : FieldsContainer + abs_value : bool, optional + Should use absolute value. + compute_amplitude : bool, optional + Do calculate amplitude. 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.min_max.min_over_time_by_entity() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_abs_value = bool() + >>> op.inputs.abs_value.connect(my_abs_value) + >>> my_compute_amplitude = bool() + >>> op.inputs.compute_amplitude.connect(my_compute_amplitude) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.min_max.min_over_time_by_entity( + ... fields_container=my_fields_container, + ... abs_value=my_abs_value, + ... compute_amplitude=my_compute_amplitude, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, abs_value=None, compute_amplitude=None, config=None, server=None): - super().__init__(name="min_over_time_by_entity", config = config, server = server) + def __init__( + self, + fields_container=None, + abs_value=None, + compute_amplitude=None, + config=None, + server=None, + ): + super().__init__(name="min_over_time_by_entity", config=config, server=server) self._inputs = InputsMinOverTimeByEntity(self) self._outputs = OutputsMinOverTimeByEntity(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if abs_value !=None: + if abs_value is not None: self.inputs.abs_value.connect(abs_value) - if compute_amplitude !=None: + if compute_amplitude is not None: self.inputs.compute_amplitude.connect(compute_amplitude) @staticmethod def _spec(): - spec = Specification(description="""Evaluates minimum over time/frequency.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 3 : 
PinSpecification(name = "abs_value", type_names=["bool"], optional=True, document="""Should use absolute value."""), - 4 : PinSpecification(name = "compute_amplitude", type_names=["bool"], optional=True, document="""Do calculate amplitude.""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Evaluates minimum over time/frequency.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 3: PinSpecification( + name="abs_value", + type_names=["bool"], + optional=True, + document="""Should use absolute value.""", + ), + 4: PinSpecification( + name="compute_amplitude", + type_names=["bool"], + optional=True, + document="""Do calculate amplitude.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "min_over_time_by_entity") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="min_over_time_by_entity", server=server) @property def inputs(self): @@ -74,143 +124,141 @@ def inputs(self): Returns -------- - inputs : InputsMinOverTimeByEntity + inputs : InputsMinOverTimeByEntity """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMinOverTimeByEntity + outputs : OutputsMinOverTimeByEntity """ return super().outputs -#internal name: min_over_time_by_entity -#scripting name: min_over_time_by_entity class InputsMinOverTimeByEntity(_Inputs): - """Intermediate class used to connect user inputs to min_over_time_by_entity operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.min_over_time_by_entity() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_abs_value = bool() - >>> op.inputs.abs_value.connect(my_abs_value) - >>> my_compute_amplitude = bool() - >>> op.inputs.compute_amplitude.connect(my_compute_amplitude) + """Intermediate class used to connect user inputs to + min_over_time_by_entity operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.min_over_time_by_entity() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_abs_value = bool() + >>> op.inputs.abs_value.connect(my_abs_value) + >>> my_compute_amplitude = bool() + >>> op.inputs.compute_amplitude.connect(my_compute_amplitude) """ + def __init__(self, op: Operator): super().__init__(min_over_time_by_entity._spec().inputs, op) - self._fields_container = Input(min_over_time_by_entity._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + min_over_time_by_entity._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._abs_value = Input(min_over_time_by_entity._spec().input_pin(3), 3, op, -1) + self._abs_value = Input(min_over_time_by_entity._spec().input_pin(3), 3, op, -1) self._inputs.append(self._abs_value) - self._compute_amplitude = Input(min_over_time_by_entity._spec().input_pin(4), 4, op, -1) + self._compute_amplitude = Input( + min_over_time_by_entity._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._compute_amplitude) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_over_time_by_entity() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def abs_value(self): - """Allows to connect abs_value input to the operator + """Allows to connect abs_value input to the operator. - - pindoc: Should use absolute value. + Should use absolute value. 
Parameters ---------- - my_abs_value : bool, + my_abs_value : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_over_time_by_entity() >>> op.inputs.abs_value.connect(my_abs_value) - >>> #or + >>> # or >>> op.inputs.abs_value(my_abs_value) - """ return self._abs_value @property def compute_amplitude(self): - """Allows to connect compute_amplitude input to the operator + """Allows to connect compute_amplitude input to the operator. - - pindoc: Do calculate amplitude. + Do calculate amplitude. Parameters ---------- - my_compute_amplitude : bool, + my_compute_amplitude : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_over_time_by_entity() >>> op.inputs.compute_amplitude.connect(my_compute_amplitude) - >>> #or + >>> # or >>> op.inputs.compute_amplitude(my_compute_amplitude) - """ return self._compute_amplitude + class OutputsMinOverTimeByEntity(_Outputs): - """Intermediate class used to get outputs from min_over_time_by_entity operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.min_over_time_by_entity() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + min_over_time_by_entity operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.min_over_time_by_entity() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(min_over_time_by_entity._spec().outputs, op) - self._fields_container = Output(min_over_time_by_entity._spec().output_pin(0), 0, op) + self._fields_container = Output( + min_over_time_by_entity._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.min_over_time_by_entity() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/min_max/phase_of_max.py b/ansys/dpf/core/operators/min_max/phase_of_max.py index 89507a00606..1128382b66f 100644 --- a/ansys/dpf/core/operators/min_max/phase_of_max.py +++ b/ansys/dpf/core/operators/min_max/phase_of_max.py @@ -1,78 +1,135 @@ """ phase_of_max -============ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "min_max" category -""" class phase_of_max(Operator): """Evaluates phase of maximum. 
- available inputs: - - real_field (Field) - - imaginary_field (Field) - - abs_value (bool) (optional) - - phase_increment (float) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.min_max.phase_of_max() - - >>> # Make input connections - >>> my_real_field = dpf.Field() - >>> op.inputs.real_field.connect(my_real_field) - >>> my_imaginary_field = dpf.Field() - >>> op.inputs.imaginary_field.connect(my_imaginary_field) - >>> my_abs_value = bool() - >>> op.inputs.abs_value.connect(my_abs_value) - >>> my_phase_increment = float() - >>> op.inputs.phase_increment.connect(my_phase_increment) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.min_max.phase_of_max(real_field=my_real_field,imaginary_field=my_imaginary_field,abs_value=my_abs_value,phase_increment=my_phase_increment) - - >>> # Get output data - >>> result_field = op.outputs.field()""" - def __init__(self, real_field=None, imaginary_field=None, abs_value=None, phase_increment=None, config=None, server=None): - super().__init__(name="phase_of_max", config = config, server = server) + Parameters + ---------- + real_field : Field + imaginary_field : Field + abs_value : bool, optional + Should use absolute value. + phase_increment : float + Phase increment. 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.min_max.phase_of_max() + + >>> # Make input connections + >>> my_real_field = dpf.Field() + >>> op.inputs.real_field.connect(my_real_field) + >>> my_imaginary_field = dpf.Field() + >>> op.inputs.imaginary_field.connect(my_imaginary_field) + >>> my_abs_value = bool() + >>> op.inputs.abs_value.connect(my_abs_value) + >>> my_phase_increment = float() + >>> op.inputs.phase_increment.connect(my_phase_increment) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.min_max.phase_of_max( + ... real_field=my_real_field, + ... imaginary_field=my_imaginary_field, + ... abs_value=my_abs_value, + ... phase_increment=my_phase_increment, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__( + self, + real_field=None, + imaginary_field=None, + abs_value=None, + phase_increment=None, + config=None, + server=None, + ): + super().__init__(name="phase_of_max", config=config, server=server) self._inputs = InputsPhaseOfMax(self) self._outputs = OutputsPhaseOfMax(self) - if real_field !=None: + if real_field is not None: self.inputs.real_field.connect(real_field) - if imaginary_field !=None: + if imaginary_field is not None: self.inputs.imaginary_field.connect(imaginary_field) - if abs_value !=None: + if abs_value is not None: self.inputs.abs_value.connect(abs_value) - if phase_increment !=None: + if phase_increment is not None: self.inputs.phase_increment.connect(phase_increment) @staticmethod def _spec(): - spec = Specification(description="""Evaluates phase of maximum.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "real_field", type_names=["field"], optional=False, document=""""""), - 1 : PinSpecification(name = "imaginary_field", type_names=["field"], optional=False, document=""""""), - 2 : PinSpecification(name = "abs_value", type_names=["bool"], optional=True, 
document="""Should use absolute value."""), - 3 : PinSpecification(name = "phase_increment", type_names=["double"], optional=False, document="""Phase increment.""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Evaluates phase of maximum.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="real_field", + type_names=["field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="imaginary_field", + type_names=["field"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="abs_value", + type_names=["bool"], + optional=True, + document="""Should use absolute value.""", + ), + 3: PinSpecification( + name="phase_increment", + type_names=["double"], + optional=False, + document="""Phase increment.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "phase_of_max") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="phase_of_max", server=server) @property def inputs(self): @@ -80,167 +137,157 @@ def inputs(self): Returns -------- - inputs : InputsPhaseOfMax + inputs : InputsPhaseOfMax """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPhaseOfMax + outputs : OutputsPhaseOfMax """ return super().outputs -#internal name: phase_of_max -#scripting name: phase_of_max class InputsPhaseOfMax(_Inputs): - """Intermediate class used to connect user inputs to phase_of_max operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.phase_of_max() - >>> my_real_field = dpf.Field() - >>> op.inputs.real_field.connect(my_real_field) - >>> my_imaginary_field = dpf.Field() - >>> op.inputs.imaginary_field.connect(my_imaginary_field) - >>> my_abs_value = bool() - >>> op.inputs.abs_value.connect(my_abs_value) - >>> my_phase_increment = float() - >>> op.inputs.phase_increment.connect(my_phase_increment) + """Intermediate class used to connect user inputs to + phase_of_max operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.phase_of_max() + >>> my_real_field = dpf.Field() + >>> op.inputs.real_field.connect(my_real_field) + >>> my_imaginary_field = dpf.Field() + >>> op.inputs.imaginary_field.connect(my_imaginary_field) + >>> my_abs_value = bool() + >>> op.inputs.abs_value.connect(my_abs_value) + >>> my_phase_increment = float() + >>> op.inputs.phase_increment.connect(my_phase_increment) """ + def __init__(self, op: Operator): super().__init__(phase_of_max._spec().inputs, op) - self._real_field = Input(phase_of_max._spec().input_pin(0), 0, op, -1) + self._real_field = Input(phase_of_max._spec().input_pin(0), 0, op, -1) self._inputs.append(self._real_field) - self._imaginary_field = Input(phase_of_max._spec().input_pin(1), 1, op, -1) + self._imaginary_field = Input(phase_of_max._spec().input_pin(1), 1, op, -1) self._inputs.append(self._imaginary_field) - self._abs_value = Input(phase_of_max._spec().input_pin(2), 2, op, -1) + self._abs_value = Input(phase_of_max._spec().input_pin(2), 2, op, -1) self._inputs.append(self._abs_value) - self._phase_increment = Input(phase_of_max._spec().input_pin(3), 3, op, -1) + self._phase_increment = Input(phase_of_max._spec().input_pin(3), 3, op, -1) self._inputs.append(self._phase_increment) @property def real_field(self): - """Allows to connect real_field input to the operator + """Allows to connect real_field input to the operator. Parameters ---------- - my_real_field : Field, + my_real_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.phase_of_max() >>> op.inputs.real_field.connect(my_real_field) - >>> #or + >>> # or >>> op.inputs.real_field(my_real_field) - """ return self._real_field @property def imaginary_field(self): - """Allows to connect imaginary_field input to the operator + """Allows to connect imaginary_field input to the operator. 
Parameters ---------- - my_imaginary_field : Field, + my_imaginary_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.phase_of_max() >>> op.inputs.imaginary_field.connect(my_imaginary_field) - >>> #or + >>> # or >>> op.inputs.imaginary_field(my_imaginary_field) - """ return self._imaginary_field @property def abs_value(self): - """Allows to connect abs_value input to the operator + """Allows to connect abs_value input to the operator. - - pindoc: Should use absolute value. + Should use absolute value. Parameters ---------- - my_abs_value : bool, + my_abs_value : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.phase_of_max() >>> op.inputs.abs_value.connect(my_abs_value) - >>> #or + >>> # or >>> op.inputs.abs_value(my_abs_value) - """ return self._abs_value @property def phase_increment(self): - """Allows to connect phase_increment input to the operator + """Allows to connect phase_increment input to the operator. - - pindoc: Phase increment. + Phase increment. Parameters ---------- - my_phase_increment : float, + my_phase_increment : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.phase_of_max() >>> op.inputs.phase_increment.connect(my_phase_increment) - >>> #or + >>> # or >>> op.inputs.phase_increment(my_phase_increment) - """ return self._phase_increment + class OutputsPhaseOfMax(_Outputs): - """Intermediate class used to get outputs from phase_of_max operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.phase_of_max() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + phase_of_max operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.phase_of_max() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(phase_of_max._spec().outputs, op) - self._field = Output(phase_of_max._spec().output_pin(0), 0, op) + self._field = Output(phase_of_max._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.phase_of_max() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/min_max/time_of_max_by_entity.py b/ansys/dpf/core/operators/min_max/time_of_max_by_entity.py index 96ae597ccd6..f3da949ff16 100644 --- a/ansys/dpf/core/operators/min_max/time_of_max_by_entity.py +++ b/ansys/dpf/core/operators/min_max/time_of_max_by_entity.py @@ -1,72 +1,122 @@ """ time_of_max_by_entity -===================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "min_max" category -""" class time_of_max_by_entity(Operator): """Evaluates time/frequency of maximum. 
- available inputs: - - fields_container (FieldsContainer) - - abs_value (bool) (optional) - - compute_amplitude (bool) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.min_max.time_of_max_by_entity() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_abs_value = bool() - >>> op.inputs.abs_value.connect(my_abs_value) - >>> my_compute_amplitude = bool() - >>> op.inputs.compute_amplitude.connect(my_compute_amplitude) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.min_max.time_of_max_by_entity(fields_container=my_fields_container,abs_value=my_abs_value,compute_amplitude=my_compute_amplitude) + Parameters + ---------- + fields_container : FieldsContainer + abs_value : bool, optional + Should use absolute value. + compute_amplitude : bool, optional + Do calculate amplitude. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.min_max.time_of_max_by_entity() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_abs_value = bool() + >>> op.inputs.abs_value.connect(my_abs_value) + >>> my_compute_amplitude = bool() + >>> op.inputs.compute_amplitude.connect(my_compute_amplitude) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.min_max.time_of_max_by_entity( + ... fields_container=my_fields_container, + ... abs_value=my_abs_value, + ... compute_amplitude=my_compute_amplitude, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, abs_value=None, compute_amplitude=None, config=None, server=None): - super().__init__(name="time_of_max_by_entity", config = config, server = server) + def __init__( + self, + fields_container=None, + abs_value=None, + compute_amplitude=None, + config=None, + server=None, + ): + super().__init__(name="time_of_max_by_entity", config=config, server=server) self._inputs = InputsTimeOfMaxByEntity(self) self._outputs = OutputsTimeOfMaxByEntity(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if abs_value !=None: + if abs_value is not None: self.inputs.abs_value.connect(abs_value) - if compute_amplitude !=None: + if compute_amplitude is not None: self.inputs.compute_amplitude.connect(compute_amplitude) @staticmethod def _spec(): - spec = Specification(description="""Evaluates time/frequency of maximum.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 3 : PinSpecification(name = "abs_value", type_names=["bool"], optional=True, document="""Should use absolute value."""), - 4 : PinSpecification(name = "compute_amplitude", type_names=["bool"], optional=True, document="""Do calculate amplitude.""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Evaluates time/frequency of maximum.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 3: PinSpecification( + name="abs_value", + type_names=["bool"], + optional=True, + 
document="""Should use absolute value.""", + ), + 4: PinSpecification( + name="compute_amplitude", + type_names=["bool"], + optional=True, + document="""Do calculate amplitude.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "time_of_max_by_entity") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="time_of_max_by_entity", server=server) @property def inputs(self): @@ -74,143 +124,141 @@ def inputs(self): Returns -------- - inputs : InputsTimeOfMaxByEntity + inputs : InputsTimeOfMaxByEntity """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsTimeOfMaxByEntity + outputs : OutputsTimeOfMaxByEntity """ return super().outputs -#internal name: time_of_max_by_entity -#scripting name: time_of_max_by_entity class InputsTimeOfMaxByEntity(_Inputs): - """Intermediate class used to connect user inputs to time_of_max_by_entity operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.time_of_max_by_entity() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_abs_value = bool() - >>> op.inputs.abs_value.connect(my_abs_value) - >>> my_compute_amplitude = bool() - >>> 
op.inputs.compute_amplitude.connect(my_compute_amplitude) + """Intermediate class used to connect user inputs to + time_of_max_by_entity operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.time_of_max_by_entity() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_abs_value = bool() + >>> op.inputs.abs_value.connect(my_abs_value) + >>> my_compute_amplitude = bool() + >>> op.inputs.compute_amplitude.connect(my_compute_amplitude) """ + def __init__(self, op: Operator): super().__init__(time_of_max_by_entity._spec().inputs, op) - self._fields_container = Input(time_of_max_by_entity._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + time_of_max_by_entity._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._abs_value = Input(time_of_max_by_entity._spec().input_pin(3), 3, op, -1) + self._abs_value = Input(time_of_max_by_entity._spec().input_pin(3), 3, op, -1) self._inputs.append(self._abs_value) - self._compute_amplitude = Input(time_of_max_by_entity._spec().input_pin(4), 4, op, -1) + self._compute_amplitude = Input( + time_of_max_by_entity._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._compute_amplitude) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.time_of_max_by_entity() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def abs_value(self): - """Allows to connect abs_value input to the operator + """Allows to connect abs_value input to the operator. 
- - pindoc: Should use absolute value. + Should use absolute value. Parameters ---------- - my_abs_value : bool, + my_abs_value : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.time_of_max_by_entity() >>> op.inputs.abs_value.connect(my_abs_value) - >>> #or + >>> # or >>> op.inputs.abs_value(my_abs_value) - """ return self._abs_value @property def compute_amplitude(self): - """Allows to connect compute_amplitude input to the operator + """Allows to connect compute_amplitude input to the operator. - - pindoc: Do calculate amplitude. + Do calculate amplitude. Parameters ---------- - my_compute_amplitude : bool, + my_compute_amplitude : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.time_of_max_by_entity() >>> op.inputs.compute_amplitude.connect(my_compute_amplitude) - >>> #or + >>> # or >>> op.inputs.compute_amplitude(my_compute_amplitude) - """ return self._compute_amplitude + class OutputsTimeOfMaxByEntity(_Outputs): - """Intermediate class used to get outputs from time_of_max_by_entity operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.time_of_max_by_entity() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + time_of_max_by_entity operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.time_of_max_by_entity() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(time_of_max_by_entity._spec().outputs, op) - self._fields_container = Output(time_of_max_by_entity._spec().output_pin(0), 0, op) + self._fields_container = Output( + time_of_max_by_entity._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.time_of_max_by_entity() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/min_max/time_of_min_by_entity.py b/ansys/dpf/core/operators/min_max/time_of_min_by_entity.py index f5719478e84..4d774c3bca9 100644 --- a/ansys/dpf/core/operators/min_max/time_of_min_by_entity.py +++ b/ansys/dpf/core/operators/min_max/time_of_min_by_entity.py @@ -1,72 +1,122 @@ """ time_of_min_by_entity -===================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "min_max" category -""" class time_of_min_by_entity(Operator): """Evaluates time/frequency of minimum. 
- available inputs: - - fields_container (FieldsContainer) - - abs_value (bool) (optional) - - compute_amplitude (bool) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.min_max.time_of_min_by_entity() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_abs_value = bool() - >>> op.inputs.abs_value.connect(my_abs_value) - >>> my_compute_amplitude = bool() - >>> op.inputs.compute_amplitude.connect(my_compute_amplitude) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.min_max.time_of_min_by_entity(fields_container=my_fields_container,abs_value=my_abs_value,compute_amplitude=my_compute_amplitude) + Parameters + ---------- + fields_container : FieldsContainer + abs_value : bool, optional + Should use absolute value. + compute_amplitude : bool, optional + Do calculate amplitude. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.min_max.time_of_min_by_entity() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_abs_value = bool() + >>> op.inputs.abs_value.connect(my_abs_value) + >>> my_compute_amplitude = bool() + >>> op.inputs.compute_amplitude.connect(my_compute_amplitude) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.min_max.time_of_min_by_entity( + ... fields_container=my_fields_container, + ... abs_value=my_abs_value, + ... compute_amplitude=my_compute_amplitude, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, abs_value=None, compute_amplitude=None, config=None, server=None): - super().__init__(name="time_of_min_by_entity", config = config, server = server) + def __init__( + self, + fields_container=None, + abs_value=None, + compute_amplitude=None, + config=None, + server=None, + ): + super().__init__(name="time_of_min_by_entity", config=config, server=server) self._inputs = InputsTimeOfMinByEntity(self) self._outputs = OutputsTimeOfMinByEntity(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if abs_value !=None: + if abs_value is not None: self.inputs.abs_value.connect(abs_value) - if compute_amplitude !=None: + if compute_amplitude is not None: self.inputs.compute_amplitude.connect(compute_amplitude) @staticmethod def _spec(): - spec = Specification(description="""Evaluates time/frequency of minimum.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 3 : PinSpecification(name = "abs_value", type_names=["bool"], optional=True, document="""Should use absolute value."""), - 4 : PinSpecification(name = "compute_amplitude", type_names=["bool"], optional=True, document="""Do calculate amplitude.""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Evaluates time/frequency of minimum.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 3: PinSpecification( + name="abs_value", + type_names=["bool"], + optional=True, + 
document="""Should use absolute value.""", + ), + 4: PinSpecification( + name="compute_amplitude", + type_names=["bool"], + optional=True, + document="""Do calculate amplitude.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "time_of_min_by_entity") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="time_of_min_by_entity", server=server) @property def inputs(self): @@ -74,143 +124,141 @@ def inputs(self): Returns -------- - inputs : InputsTimeOfMinByEntity + inputs : InputsTimeOfMinByEntity """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsTimeOfMinByEntity + outputs : OutputsTimeOfMinByEntity """ return super().outputs -#internal name: time_of_min_by_entity -#scripting name: time_of_min_by_entity class InputsTimeOfMinByEntity(_Inputs): - """Intermediate class used to connect user inputs to time_of_min_by_entity operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.time_of_min_by_entity() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_abs_value = bool() - >>> op.inputs.abs_value.connect(my_abs_value) - >>> my_compute_amplitude = bool() - >>> 
op.inputs.compute_amplitude.connect(my_compute_amplitude) + """Intermediate class used to connect user inputs to + time_of_min_by_entity operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.time_of_min_by_entity() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_abs_value = bool() + >>> op.inputs.abs_value.connect(my_abs_value) + >>> my_compute_amplitude = bool() + >>> op.inputs.compute_amplitude.connect(my_compute_amplitude) """ + def __init__(self, op: Operator): super().__init__(time_of_min_by_entity._spec().inputs, op) - self._fields_container = Input(time_of_min_by_entity._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + time_of_min_by_entity._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._abs_value = Input(time_of_min_by_entity._spec().input_pin(3), 3, op, -1) + self._abs_value = Input(time_of_min_by_entity._spec().input_pin(3), 3, op, -1) self._inputs.append(self._abs_value) - self._compute_amplitude = Input(time_of_min_by_entity._spec().input_pin(4), 4, op, -1) + self._compute_amplitude = Input( + time_of_min_by_entity._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._compute_amplitude) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.time_of_min_by_entity() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def abs_value(self): - """Allows to connect abs_value input to the operator + """Allows to connect abs_value input to the operator. 
- - pindoc: Should use absolute value. + Should use absolute value. Parameters ---------- - my_abs_value : bool, + my_abs_value : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.time_of_min_by_entity() >>> op.inputs.abs_value.connect(my_abs_value) - >>> #or + >>> # or >>> op.inputs.abs_value(my_abs_value) - """ return self._abs_value @property def compute_amplitude(self): - """Allows to connect compute_amplitude input to the operator + """Allows to connect compute_amplitude input to the operator. - - pindoc: Do calculate amplitude. + Do calculate amplitude. Parameters ---------- - my_compute_amplitude : bool, + my_compute_amplitude : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.time_of_min_by_entity() >>> op.inputs.compute_amplitude.connect(my_compute_amplitude) - >>> #or + >>> # or >>> op.inputs.compute_amplitude(my_compute_amplitude) - """ return self._compute_amplitude + class OutputsTimeOfMinByEntity(_Outputs): - """Intermediate class used to get outputs from time_of_min_by_entity operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.min_max.time_of_min_by_entity() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + time_of_min_by_entity operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.min_max.time_of_min_by_entity() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(time_of_min_by_entity._spec().outputs, op) - self._fields_container = Output(time_of_min_by_entity._spec().output_pin(0), 0, op) + self._fields_container = Output( + time_of_min_by_entity._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.min_max.time_of_min_by_entity() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/operator.mustache b/ansys/dpf/core/operators/operator.mustache new file mode 100644 index 00000000000..21fed079b7d --- /dev/null +++ b/ansys/dpf/core/operators/operator.mustache @@ -0,0 +1,237 @@ +""" +{{class_name}} +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +{{#outputs}} +from ansys.dpf.core.outputs import Output, _Outputs +{{/outputs}} +{{^outputs}} +from ansys.dpf.core.outputs import _Outputs +{{/outputs}} +{{#multiple_output_types}} +from ansys.dpf.core.outputs import _modify_output_spec_with_one_type +{{/multiple_output_types}} +from ansys.dpf.core.operators.specification import PinSpecification, Specification + +class {{class_name}}(Operator): + """{{{docstring}}} + + Parameters + ---------- + {{#input_pins}} + {{#optional}} + {{name}} :{{#types_for_docstring}} {{types_for_docstring}},{{/types_for_docstring}} optional + {{/optional}} + {{^optional}} + {{name}} :{{#types_for_docstring}} {{types_for_docstring}}{{/types_for_docstring}} + {{/optional}} + {{#document}} + {{{document}}} + {{/document}} + {{/input_pins}} + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.{{category}}.{{class_name}}() + + >>> # Make input connections + {{#input_pins}} + >>> my_{{name}} = {{^built_in_main_type}}dpf.{{/built_in_main_type}}{{main_type}}() + >>> op.inputs.{{name}}.connect(my_{{name}}) + {{/input_pins}} + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.{{category}}.{{class_name}}( + {{#input_pins}} + ... {{name}}=my_{{name}}, + {{/input_pins}} + ... 
) + + {{#outputs}} + >>> # Get output data + {{#output_pins}} + >>> result_{{name}} = op.outputs.{{name}}() + {{/output_pins}} + {{/outputs}} + """ + + def __init__(self, {{#input_pins}}{{name}}=None, {{/input_pins}}config=None, server=None): + super().__init__(name="{{operator_name}}", config=config, server=server) + self._inputs = Inputs{{capital_class_name}}(self) + self._outputs = Outputs{{capital_class_name}}(self) + {{#input_pins}} + if {{name}} is not None: + self.inputs.{{name}}.connect({{name}}) + {{/input_pins}} + + @staticmethod + def _spec(): + description = """{{specification_description}}""" + spec = Specification( + description=description, + map_input_pin_spec={ + {{#input_pins}} + {{id}}: PinSpecification( + name="{{pin_name}}", + {{#has_types}} + type_names={{{types}}}, + {{/has_types}} + {{^has_types}} + type_names=["any"], + {{/has_types}} + optional={{optional}}, + document="""{{{document}}}""", + ), + {{/input_pins}} + }, + map_output_pin_spec={ + {{#output_pins}} + {{id}}: PinSpecification( + name="{{name}}", + {{#has_types}} + type_names={{{types}}}, + {{/has_types}} + optional={{optional}}, + document="""{{{document}}}""", + ), + {{/output_pins}} + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="{{operator_name}}", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : Inputs{{capital_class_name}} + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : Outputs{{capital_class_name}} + """ + return super().outputs + +class Inputs{{capital_class_name}}(_Inputs): + """Intermediate class used to connect user inputs to + {{class_name}} operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.{{category}}.{{class_name}}() + {{#input_pins}} + >>> my_{{name}} = {{^built_in_main_type}}dpf.{{/built_in_main_type}}{{main_type}}() + >>> op.inputs.{{name}}.connect(my_{{name}}) + {{/input_pins}} + """ + + def __init__(self, op: Operator): + super().__init__({{class_name}}._spec().inputs, op) + {{#input_pins}} + self._{{name}} = Input({{class_name}}._spec().input_pin({{id}}), {{id}}, op, {{ellipsis}}) + self._inputs.append(self._{{name}}) + {{/input_pins}} + + {{#input_pins}} + @property + def {{name}}(self): + """Allows to connect {{name}} input to the operator. + {{#document}} + + {{{document}}} + {{/document}} + + Parameters + ---------- + my_{{name}} :{{#types_for_docstring}} {{types_for_docstring}}{{/types_for_docstring}} + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.{{category}}.{{class_name}}() + >>> op.inputs.{{name}}.connect(my_{{name}}) + >>> # or + >>> op.inputs.{{name}}(my_{{name}}) + """ + return self._{{name}} + + {{/input_pins}} +class Outputs{{capital_class_name}}(_Outputs): + """Intermediate class used to get outputs from + {{class_name}} operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.{{category}}.{{class_name}}() + >>> # Connect inputs : op.inputs. ... 
+ {{#output_pins}} + >>> result_{{name}} = op.outputs.{{name}}() + {{/output_pins}} + """ + + def __init__(self, op: Operator): + super().__init__({{class_name}}._spec().outputs, op) + {{#output_pins}} + {{#multiple_types}} + {{#printable_type_names}} + self.{{name}}_as_{{.}} = Output(_modify_output_spec_with_one_type({{class_name}}._spec().output_pin({{id}}), "{{.}}"), {{id}}, op) + self._outputs.append(self.{{name}}_as_{{.}}) + {{/printable_type_names}} + {{/multiple_types}} + {{^multiple_types}} + self._{{name}} = Output({{class_name}}._spec().output_pin({{id}}), {{id}}, op) + self._outputs.append(self._{{name}}) + {{/multiple_types}} + {{/output_pins}} + {{#output_pins}}{{^multiple_types}} + + @property + def {{name}}(self): + """Allows to get {{name}} output of the operator + + Returns + ---------- + my_{{name}} :{{#types_for_docstring}} {{types_for_docstring}}{{/types_for_docstring}} + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.{{category}}.{{class_name}}() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_{{name}} = op.outputs.{{name}}() + """ # noqa: E501 + return self._{{name}} + {{/multiple_types}}{{/output_pins}} \ No newline at end of file diff --git a/ansys/dpf/core/operators/result/__init__.py b/ansys/dpf/core/operators/result/__init__.py index ac91c9bd3f0..0589eea729b 100644 --- a/ansys/dpf/core/operators/result/__init__.py +++ b/ansys/dpf/core/operators/result/__init__.py @@ -1,11 +1,15 @@ from .plastic_strain_principal_1 import plastic_strain_principal_1 -from .plastic_strain_principal_3 import plastic_strain_principal_3 +from .current_density import current_density from .rigid_transformation import rigid_transformation +from .num_surface_status_changes import num_surface_status_changes +from .joint_relative_angular_velocity import joint_relative_angular_velocity from .elastic_strain_Y import elastic_strain_Y from .nodal_moment import nodal_moment from .elemental_mass import elemental_mass from .heat_flux import heat_flux from .co_energy import co_energy +from .plastic_strain_principal_3 import plastic_strain_principal_3 +from .electric_flux_density import electric_flux_density from .plastic_strain_principal_2 import plastic_strain_principal_2 from .elastic_strain_Z import elastic_strain_Z from .stress import stress @@ -75,7 +79,6 @@ from .contact_sliding_distance import contact_sliding_distance from .contact_gap_distance import contact_gap_distance from .contact_surface_heat_flux import contact_surface_heat_flux -from .num_surface_status_changes import num_surface_status_changes from .contact_fluid_penetration_pressure import contact_fluid_penetration_pressure from .elemental_volume import elemental_volume from .artificial_hourglass_energy import artificial_hourglass_energy @@ -87,15 +90,31 @@ from .raw_reaction_force import raw_reaction_force from .electric_potential import electric_potential from .thickness import thickness +from .equivalent_mass import equivalent_mass from .element_orientations import element_orientations from .custom 
import custom +from .elemental_heat_generation import elemental_heat_generation +from .temperature_grad import temperature_grad +from .joint_force_reaction import joint_force_reaction +from .joint_moment_reaction import joint_moment_reaction +from .joint_relative_displacement import joint_relative_displacement +from .joint_relative_rotation import joint_relative_rotation +from .joint_relative_velocity import joint_relative_velocity +from .joint_relative_acceleration import joint_relative_acceleration +from .joint_relative_angular_acceleration import joint_relative_angular_acceleration +from .thermal_strains_eqv import thermal_strains_eqv +from .swelling_strains import swelling_strains from .stress_von_mises import stress_von_mises +from .members_in_compression_not_certified import members_in_compression_not_certified +from .members_in_bending_not_certified import members_in_bending_not_certified +from .members_in_linear_compression_bending_not_certified import members_in_linear_compression_bending_not_certified from .cyclic_expansion import cyclic_expansion from .equivalent_radiated_power import equivalent_radiated_power from .torque import torque +from .recombine_harmonic_indeces_cyclic import recombine_harmonic_indeces_cyclic +from .euler_load_buckling import euler_load_buckling from .cyclic_analytic_usum_max import cyclic_analytic_usum_max from .cyclic_analytic_seqv_max import cyclic_analytic_seqv_max -from .recombine_harmonic_indeces_cyclic import recombine_harmonic_indeces_cyclic from .poynting_vector import poynting_vector from .poynting_vector_surface import poynting_vector_surface from .nodal_averaged_elastic_strains import nodal_averaged_elastic_strains @@ -104,10 +123,10 @@ from .run import run from .cyclic_expanded_velocity import cyclic_expanded_velocity from .cyclic_expanded_el_strain import cyclic_expanded_el_strain -from .nodal_averaged_thermal_swelling_strains import nodal_averaged_thermal_swelling_strains -from .nodal_averaged_stresses import 
nodal_averaged_stresses from .nodal_averaged_thermal_strains import nodal_averaged_thermal_strains from .nodal_averaged_plastic_strains import nodal_averaged_plastic_strains +from .nodal_averaged_thermal_swelling_strains import nodal_averaged_thermal_swelling_strains +from .nodal_averaged_stresses import nodal_averaged_stresses from .nodal_averaged_creep_strains import nodal_averaged_creep_strains from .nodal_averaged_equivalent_thermal_strains import nodal_averaged_equivalent_thermal_strains from .nodal_averaged_equivalent_plastic_strain import nodal_averaged_equivalent_plastic_strain @@ -116,6 +135,7 @@ from .nmisc import nmisc from .enf_rotation_by_euler_nodes import enf_rotation_by_euler_nodes from .cms_matrices_provider import cms_matrices_provider +from .coordinate_system import coordinate_system from .smisc import smisc from .nodal_rotation_by_euler_nodes import nodal_rotation_by_euler_nodes from .stress_rotation_by_euler_nodes import stress_rotation_by_euler_nodes diff --git a/ansys/dpf/core/operators/result/acceleration.py b/ansys/dpf/core/operators/result/acceleration.py index 228865691d6..20c8d6b2985 100644 --- a/ansys/dpf/core/operators/result/acceleration.py +++ b/ansys/dpf/core/operators/result/acceleration.py @@ -1,92 +1,238 @@ """ acceleration -============ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class acceleration(Operator): - """Read/compute nodal accelerations by calling the readers defined by the datasources. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.acceleration() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.acceleration(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="A", config = config, server = server) + """Read/compute nodal accelerations by calling the readers defined by the + datasources. 
+ + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.acceleration() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> 
op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.acceleration( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="A", config=config, server=server) self._inputs = InputsAcceleration(self) self._outputs = OutputsAcceleration(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute nodal accelerations by calling the readers defined by the 
datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute nodal accelerations by calling the 
readers defined by the + datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and 
stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "A") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="A", server=server) @property def inputs(self): @@ -94,275 +240,277 @@ def inputs(self): Returns -------- - inputs : InputsAcceleration + inputs : InputsAcceleration """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsAcceleration + outputs : OutputsAcceleration """ return super().outputs -#internal name: A -#scripting name: acceleration class InputsAcceleration(_Inputs): - """Intermediate class used to connect user inputs to acceleration operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.acceleration() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - 
>>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + acceleration operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.acceleration() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(acceleration._spec().inputs, op) - self._time_scoping = Input(acceleration._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(acceleration._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(acceleration._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(acceleration._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(acceleration._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(acceleration._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(acceleration._spec().input_pin(3), 3, op, -1) + 
self._streams_container = Input(acceleration._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(acceleration._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(acceleration._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(acceleration._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + acceleration._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(acceleration._spec().input_pin(7), 7, op, -1) + self._mesh = Input(acceleration._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(acceleration._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(acceleration._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. 
The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
- - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsAcceleration(_Outputs): - """Intermediate class used to get outputs from acceleration operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.acceleration() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + acceleration operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.acceleration() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(acceleration._spec().outputs, op) - self._fields_container = Output(acceleration._spec().output_pin(0), 0, op) + self._fields_container = Output(acceleration._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/acceleration_X.py b/ansys/dpf/core/operators/result/acceleration_X.py index 8d9177cf688..990158d417a 100644 --- a/ansys/dpf/core/operators/result/acceleration_X.py +++ b/ansys/dpf/core/operators/result/acceleration_X.py @@ -1,92 +1,239 @@ """ acceleration_X -============== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class acceleration_X(Operator): - """Read/compute nodal accelerations X component of the vector (1st component) by calling the readers defined by the datasources. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.acceleration_X() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.acceleration_X(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="AX", config = config, server = server) + """Read/compute nodal accelerations X component of the vector (1st + component) by calling the readers 
defined by the datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.acceleration_X() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> 
my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.acceleration_X( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="AX", config=config, server=server) self._inputs = InputsAccelerationX(self) self._outputs = OutputsAccelerationX(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute nodal accelerations X 
component of the vector (1st component) by calling the readers defined by the datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, 
document="""""")}) + description = """Read/compute nodal accelerations X component of the vector (1st + component) by calling the readers defined by the + datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 
cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "AX") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="AX", server=server) @property def inputs(self): @@ -94,275 +241,277 @@ def inputs(self): Returns -------- - inputs : InputsAccelerationX + inputs : InputsAccelerationX """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsAccelerationX + outputs : OutputsAccelerationX """ return super().outputs -#internal name: AX -#scripting name: acceleration_X class InputsAccelerationX(_Inputs): - """Intermediate class used to connect user inputs to acceleration_X operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.acceleration_X() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> 
op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + acceleration_X operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.acceleration_X() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(acceleration_X._spec().inputs, op) - self._time_scoping = Input(acceleration_X._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(acceleration_X._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(acceleration_X._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(acceleration_X._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(acceleration_X._spec().input_pin(2), 2, op, -1) + self._fields_container = 
Input(acceleration_X._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(acceleration_X._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(acceleration_X._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(acceleration_X._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(acceleration_X._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(acceleration_X._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + acceleration_X._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(acceleration_X._spec().input_pin(7), 7, op, -1) + self._mesh = Input(acceleration_X._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(acceleration_X._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(acceleration_X._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_X() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_X() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_X() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_X() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_X() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_X() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_X() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_X() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsAccelerationX(_Outputs): - """Intermediate class used to get outputs from acceleration_X operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.acceleration_X() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + acceleration_X operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.acceleration_X() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(acceleration_X._spec().outputs, op) - self._fields_container = Output(acceleration_X._spec().output_pin(0), 0, op) + self._fields_container = Output(acceleration_X._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_X() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/acceleration_Y.py b/ansys/dpf/core/operators/result/acceleration_Y.py index ce63ad171e6..2876e6542a8 100644 --- a/ansys/dpf/core/operators/result/acceleration_Y.py +++ b/ansys/dpf/core/operators/result/acceleration_Y.py @@ -1,92 +1,239 @@ """ acceleration_Y -============== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class acceleration_Y(Operator): - """Read/compute nodal accelerations Y component of the vector (2nd component) by calling the readers defined by the datasources. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.acceleration_Y() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.acceleration_Y(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="AY", config = config, server = server) + """Read/compute nodal accelerations Y component of the vector (2nd + component) by calling the readers 
defined by the datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.acceleration_Y() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> 
my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.acceleration_Y( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="AY", config=config, server=server) self._inputs = InputsAccelerationY(self) self._outputs = OutputsAccelerationY(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute nodal accelerations Y 
component of the vector (2nd component) by calling the readers defined by the datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, 
document="""""")}) + description = """Read/compute nodal accelerations Y component of the vector (2nd + component) by calling the readers defined by the + datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 
cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "AY") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="AY", server=server) @property def inputs(self): @@ -94,275 +241,277 @@ def inputs(self): Returns -------- - inputs : InputsAccelerationY + inputs : InputsAccelerationY """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsAccelerationY + outputs : OutputsAccelerationY """ return super().outputs -#internal name: AY -#scripting name: acceleration_Y class InputsAccelerationY(_Inputs): - """Intermediate class used to connect user inputs to acceleration_Y operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.acceleration_Y() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> 
op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + acceleration_Y operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.acceleration_Y() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(acceleration_Y._spec().inputs, op) - self._time_scoping = Input(acceleration_Y._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(acceleration_Y._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(acceleration_Y._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(acceleration_Y._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(acceleration_Y._spec().input_pin(2), 2, op, -1) + self._fields_container = 
Input(acceleration_Y._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(acceleration_Y._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(acceleration_Y._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(acceleration_Y._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(acceleration_Y._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(acceleration_Y._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + acceleration_Y._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(acceleration_Y._spec().input_pin(7), 7, op, -1) + self._mesh = Input(acceleration_Y._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(acceleration_Y._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(acceleration_Y._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_Y() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_Y() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_Y() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_Y() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_Y() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_Y() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_Y() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_Y() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsAccelerationY(_Outputs): - """Intermediate class used to get outputs from acceleration_Y operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.acceleration_Y() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + acceleration_Y operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.acceleration_Y() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(acceleration_Y._spec().outputs, op) - self._fields_container = Output(acceleration_Y._spec().output_pin(0), 0, op) + self._fields_container = Output(acceleration_Y._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_Y() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/acceleration_Z.py b/ansys/dpf/core/operators/result/acceleration_Z.py index 2281ec0d502..e17f01efbd6 100644 --- a/ansys/dpf/core/operators/result/acceleration_Z.py +++ b/ansys/dpf/core/operators/result/acceleration_Z.py @@ -1,92 +1,239 @@ """ acceleration_Z -============== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class acceleration_Z(Operator): - """Read/compute nodal accelerations Z component of the vector (3rd component) by calling the readers defined by the datasources. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.acceleration_Z() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.acceleration_Z(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="AZ", config = config, server = server) + """Read/compute nodal accelerations Z component of the vector (3rd + component) by calling the readers 
defined by the datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.acceleration_Z() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> 
my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.acceleration_Z( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="AZ", config=config, server=server) self._inputs = InputsAccelerationZ(self) self._outputs = OutputsAccelerationZ(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute nodal accelerations Z 
component of the vector (3rd component) by calling the readers defined by the datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, 
document="""""")}) + description = """Read/compute nodal accelerations Z component of the vector (3rd + component) by calling the readers defined by the + datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 
cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "AZ") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="AZ", server=server) @property def inputs(self): @@ -94,275 +241,277 @@ def inputs(self): Returns -------- - inputs : InputsAccelerationZ + inputs : InputsAccelerationZ """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsAccelerationZ + outputs : OutputsAccelerationZ """ return super().outputs -#internal name: AZ -#scripting name: acceleration_Z class InputsAccelerationZ(_Inputs): - """Intermediate class used to connect user inputs to acceleration_Z operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.acceleration_Z() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> 
op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + acceleration_Z operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.acceleration_Z() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(acceleration_Z._spec().inputs, op) - self._time_scoping = Input(acceleration_Z._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(acceleration_Z._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(acceleration_Z._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(acceleration_Z._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(acceleration_Z._spec().input_pin(2), 2, op, -1) + self._fields_container = 
Input(acceleration_Z._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(acceleration_Z._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(acceleration_Z._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(acceleration_Z._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(acceleration_Z._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(acceleration_Z._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + acceleration_Z._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(acceleration_Z._spec().input_pin(7), 7, op, -1) + self._mesh = Input(acceleration_Z._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(acceleration_Z._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(acceleration_Z._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_Z() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_Z() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_Z() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_Z() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_Z() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_Z() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_Z() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_Z() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsAccelerationZ(_Outputs): - """Intermediate class used to get outputs from acceleration_Z operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.acceleration_Z() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + acceleration_Z operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.acceleration_Z() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(acceleration_Z._spec().outputs, op) - self._fields_container = Output(acceleration_Z._spec().output_pin(0), 0, op) + self._fields_container = Output(acceleration_Z._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.acceleration_Z() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/accu_eqv_creep_strain.py b/ansys/dpf/core/operators/result/accu_eqv_creep_strain.py index 3eeeb55720d..6933a25ac54 100644 --- a/ansys/dpf/core/operators/result/accu_eqv_creep_strain.py +++ b/ansys/dpf/core/operators/result/accu_eqv_creep_strain.py @@ -1,98 +1,274 @@ """ accu_eqv_creep_strain -===================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class accu_eqv_creep_strain(Operator): - """Read/compute element nodal accumulated equivalent creep strain by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.accu_eqv_creep_strain() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.accu_eqv_creep_strain(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="ENL_CREQ", config = config, server = server) + """Read/compute element nodal accumulated equivalent creep strain by + calling the readers defined by the datasources. Regarding the + requested location and the input mesh scoping, the result location + can be Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.accu_eqv_creep_strain() + + >>> # Make input 
connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.accu_eqv_creep_strain( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="ENL_CREQ", config=config, server=server) self._inputs = InputsAccuEqvCreepStrain(self) self._outputs = OutputsAccuEqvCreepStrain(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal accumulated equivalent creep strain by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal accumulated equivalent creep strain by + calling the readers defined by the datasources. Regarding + the requested location and the input mesh scoping, the + result location can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ENL_CREQ") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ENL_CREQ", server=server) @property def inputs(self): @@ -100,301 +276,345 @@ def inputs(self): Returns -------- - inputs : InputsAccuEqvCreepStrain + inputs : InputsAccuEqvCreepStrain """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsAccuEqvCreepStrain + outputs : OutputsAccuEqvCreepStrain """ return super().outputs -#internal name: ENL_CREQ -#scripting name: accu_eqv_creep_strain class InputsAccuEqvCreepStrain(_Inputs): - """Intermediate class used to connect user inputs to accu_eqv_creep_strain operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.accu_eqv_creep_strain() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> 
op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + accu_eqv_creep_strain operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.accu_eqv_creep_strain() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(accu_eqv_creep_strain._spec().inputs, op) - self._time_scoping = Input(accu_eqv_creep_strain._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + accu_eqv_creep_strain._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(accu_eqv_creep_strain._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + accu_eqv_creep_strain._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(accu_eqv_creep_strain._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + 
accu_eqv_creep_strain._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(accu_eqv_creep_strain._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + accu_eqv_creep_strain._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(accu_eqv_creep_strain._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + accu_eqv_creep_strain._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(accu_eqv_creep_strain._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + accu_eqv_creep_strain._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(accu_eqv_creep_strain._spec().input_pin(7), 7, op, -1) + self._mesh = Input(accu_eqv_creep_strain._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(accu_eqv_creep_strain._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + accu_eqv_creep_strain._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(accu_eqv_creep_strain._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + accu_eqv_creep_strain._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) + self._read_beams = Input( + accu_eqv_creep_strain._spec().input_pin(21), 21, op, -1 + ) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.accu_eqv_creep_strain() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.accu_eqv_creep_strain() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.accu_eqv_creep_strain() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.accu_eqv_creep_strain() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.accu_eqv_creep_strain() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.accu_eqv_creep_strain() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.accu_eqv_creep_strain() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. 
- - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.accu_eqv_creep_strain() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.accu_eqv_creep_strain() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. 
+ + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.accu_eqv_creep_strain() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsAccuEqvCreepStrain(_Outputs): - """Intermediate class used to get outputs from accu_eqv_creep_strain operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.accu_eqv_creep_strain() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + accu_eqv_creep_strain operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.accu_eqv_creep_strain() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(accu_eqv_creep_strain._spec().outputs, op) - self._fields_container = Output(accu_eqv_creep_strain._spec().output_pin(0), 0, op) + self._fields_container = Output( + accu_eqv_creep_strain._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.accu_eqv_creep_strain() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/accu_eqv_plastic_strain.py b/ansys/dpf/core/operators/result/accu_eqv_plastic_strain.py index b9600ca36e9..01f2644061a 100644 --- a/ansys/dpf/core/operators/result/accu_eqv_plastic_strain.py +++ b/ansys/dpf/core/operators/result/accu_eqv_plastic_strain.py @@ -1,98 +1,274 @@ """ accu_eqv_plastic_strain -======================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class accu_eqv_plastic_strain(Operator): - """Read/compute element nodal accumulated equivalent plastic strain by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.accu_eqv_plastic_strain() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.accu_eqv_plastic_strain(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="ENL_EPEQ", config = config, server = server) + """Read/compute element nodal accumulated equivalent plastic strain by + calling the readers defined by the datasources. Regarding the + requested location and the input mesh scoping, the result location + can be Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.accu_eqv_plastic_strain() + + >>> # Make input 
connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.accu_eqv_plastic_strain( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="ENL_EPEQ", config=config, server=server) self._inputs = InputsAccuEqvPlasticStrain(self) self._outputs = OutputsAccuEqvPlasticStrain(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal accumulated equivalent plastic strain by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal accumulated equivalent plastic strain by + calling the readers defined by the datasources. Regarding + the requested location and the input mesh scoping, the + result location can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ENL_EPEQ") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ENL_EPEQ", server=server) @property def inputs(self): @@ -100,301 +276,345 @@ def inputs(self): Returns -------- - inputs : InputsAccuEqvPlasticStrain + inputs : InputsAccuEqvPlasticStrain """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsAccuEqvPlasticStrain + outputs : OutputsAccuEqvPlasticStrain """ return super().outputs -#internal name: ENL_EPEQ -#scripting name: accu_eqv_plastic_strain class InputsAccuEqvPlasticStrain(_Inputs): - """Intermediate class used to connect user inputs to accu_eqv_plastic_strain operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.accu_eqv_plastic_strain() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> 
op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + accu_eqv_plastic_strain operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.accu_eqv_plastic_strain() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(accu_eqv_plastic_strain._spec().inputs, op) - self._time_scoping = Input(accu_eqv_plastic_strain._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + accu_eqv_plastic_strain._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(accu_eqv_plastic_strain._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + accu_eqv_plastic_strain._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(accu_eqv_plastic_strain._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + 
accu_eqv_plastic_strain._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(accu_eqv_plastic_strain._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + accu_eqv_plastic_strain._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(accu_eqv_plastic_strain._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + accu_eqv_plastic_strain._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(accu_eqv_plastic_strain._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + accu_eqv_plastic_strain._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(accu_eqv_plastic_strain._spec().input_pin(7), 7, op, -1) + self._mesh = Input(accu_eqv_plastic_strain._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(accu_eqv_plastic_strain._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + accu_eqv_plastic_strain._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(accu_eqv_plastic_strain._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + accu_eqv_plastic_strain._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) + self._read_beams = Input( + accu_eqv_plastic_strain._spec().input_pin(21), 21, op, -1 + ) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.accu_eqv_plastic_strain() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.accu_eqv_plastic_strain() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.accu_eqv_plastic_strain() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.accu_eqv_plastic_strain() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.accu_eqv_plastic_strain() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.accu_eqv_plastic_strain() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.accu_eqv_plastic_strain() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. 
- - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.accu_eqv_plastic_strain() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.accu_eqv_plastic_strain() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. 
+ + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.accu_eqv_plastic_strain() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsAccuEqvPlasticStrain(_Outputs): - """Intermediate class used to get outputs from accu_eqv_plastic_strain operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.accu_eqv_plastic_strain() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + accu_eqv_plastic_strain operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.accu_eqv_plastic_strain() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(accu_eqv_plastic_strain._spec().outputs, op) - self._fields_container = Output(accu_eqv_plastic_strain._spec().output_pin(0), 0, op) + self._fields_container = Output( + accu_eqv_plastic_strain._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.accu_eqv_plastic_strain() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/add_rigid_body_motion.py b/ansys/dpf/core/operators/result/add_rigid_body_motion.py index a14f5812bbf..ff18ca43a14 100644 --- a/ansys/dpf/core/operators/result/add_rigid_body_motion.py +++ b/ansys/dpf/core/operators/result/add_rigid_body_motion.py @@ -1,84 +1,153 @@ """ add_rigid_body_motion -===================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class add_rigid_body_motion(Operator): - """Adds a given rigid translation, center and rotation from a displacement field. The rotation is given in terms of rotations angles. 
Note that the displacement field has to be in the global coordinate sytem - - available inputs: - - displacement_field (Field) - - translation_field (Field) - - rotation_field (Field) - - center_field (Field) - - mesh (MeshedRegion) (optional) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.add_rigid_body_motion() - - >>> # Make input connections - >>> my_displacement_field = dpf.Field() - >>> op.inputs.displacement_field.connect(my_displacement_field) - >>> my_translation_field = dpf.Field() - >>> op.inputs.translation_field.connect(my_translation_field) - >>> my_rotation_field = dpf.Field() - >>> op.inputs.rotation_field.connect(my_rotation_field) - >>> my_center_field = dpf.Field() - >>> op.inputs.center_field.connect(my_center_field) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.add_rigid_body_motion(displacement_field=my_displacement_field,translation_field=my_translation_field,rotation_field=my_rotation_field,center_field=my_center_field,mesh=my_mesh) - - >>> # Get output data - >>> result_field = op.outputs.field()""" - def __init__(self, displacement_field=None, translation_field=None, rotation_field=None, center_field=None, mesh=None, config=None, server=None): - super().__init__(name="RigidBodyAddition", config = config, server = server) + """Adds a given rigid translation, center and rotation from a + displacement field. The rotation is given in terms of rotations + angles. 
Note that the displacement field has to be in the global + coordinate sytem + + Parameters + ---------- + displacement_field : Field + translation_field : Field + rotation_field : Field + center_field : Field + mesh : MeshedRegion, optional + Default is the mesh in the support + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.add_rigid_body_motion() + + >>> # Make input connections + >>> my_displacement_field = dpf.Field() + >>> op.inputs.displacement_field.connect(my_displacement_field) + >>> my_translation_field = dpf.Field() + >>> op.inputs.translation_field.connect(my_translation_field) + >>> my_rotation_field = dpf.Field() + >>> op.inputs.rotation_field.connect(my_rotation_field) + >>> my_center_field = dpf.Field() + >>> op.inputs.center_field.connect(my_center_field) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.add_rigid_body_motion( + ... displacement_field=my_displacement_field, + ... translation_field=my_translation_field, + ... rotation_field=my_rotation_field, + ... center_field=my_center_field, + ... mesh=my_mesh, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__( + self, + displacement_field=None, + translation_field=None, + rotation_field=None, + center_field=None, + mesh=None, + config=None, + server=None, + ): + super().__init__(name="RigidBodyAddition", config=config, server=server) self._inputs = InputsAddRigidBodyMotion(self) self._outputs = OutputsAddRigidBodyMotion(self) - if displacement_field !=None: + if displacement_field is not None: self.inputs.displacement_field.connect(displacement_field) - if translation_field !=None: + if translation_field is not None: self.inputs.translation_field.connect(translation_field) - if rotation_field !=None: + if rotation_field is not None: self.inputs.rotation_field.connect(rotation_field) - if center_field !=None: + if center_field is not None: self.inputs.center_field.connect(center_field) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Adds a given rigid translation, center and rotation from a displacement field. The rotation is given in terms of rotations angles. 
Note that the displacement field has to be in the global coordinate sytem""", - map_input_pin_spec={ - 0 : PinSpecification(name = "displacement_field", type_names=["field"], optional=False, document=""""""), - 1 : PinSpecification(name = "translation_field", type_names=["field"], optional=False, document=""""""), - 2 : PinSpecification(name = "rotation_field", type_names=["field"], optional=False, document=""""""), - 3 : PinSpecification(name = "center_field", type_names=["field"], optional=False, document=""""""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""default is the mesh in the support""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Adds a given rigid translation, center and rotation from a + displacement field. The rotation is given in terms of + rotations angles. Note that the displacement field has to + be in the global coordinate sytem""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="displacement_field", + type_names=["field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="translation_field", + type_names=["field"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="rotation_field", + type_names=["field"], + optional=False, + document="""""", + ), + 3: PinSpecification( + name="center_field", + type_names=["field"], + optional=False, + document="""""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""Default is the mesh in the support""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "RigidBodyAddition") + def default_config(server=None): 
+ """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="RigidBodyAddition", server=server) @property def inputs(self): @@ -86,189 +155,185 @@ def inputs(self): Returns -------- - inputs : InputsAddRigidBodyMotion + inputs : InputsAddRigidBodyMotion """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsAddRigidBodyMotion + outputs : OutputsAddRigidBodyMotion """ return super().outputs -#internal name: RigidBodyAddition -#scripting name: add_rigid_body_motion class InputsAddRigidBodyMotion(_Inputs): - """Intermediate class used to connect user inputs to add_rigid_body_motion operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.add_rigid_body_motion() - >>> my_displacement_field = dpf.Field() - >>> op.inputs.displacement_field.connect(my_displacement_field) - >>> my_translation_field = dpf.Field() - >>> op.inputs.translation_field.connect(my_translation_field) - >>> my_rotation_field = dpf.Field() - >>> op.inputs.rotation_field.connect(my_rotation_field) - >>> my_center_field = dpf.Field() - >>> op.inputs.center_field.connect(my_center_field) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + add_rigid_body_motion operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.add_rigid_body_motion() + >>> my_displacement_field = dpf.Field() + >>> op.inputs.displacement_field.connect(my_displacement_field) + >>> my_translation_field = dpf.Field() + >>> op.inputs.translation_field.connect(my_translation_field) + >>> my_rotation_field = dpf.Field() + >>> op.inputs.rotation_field.connect(my_rotation_field) + >>> my_center_field = dpf.Field() + >>> op.inputs.center_field.connect(my_center_field) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(add_rigid_body_motion._spec().inputs, op) - self._displacement_field = Input(add_rigid_body_motion._spec().input_pin(0), 0, op, -1) + self._displacement_field = Input( + add_rigid_body_motion._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._displacement_field) - self._translation_field = Input(add_rigid_body_motion._spec().input_pin(1), 1, op, -1) + self._translation_field = Input( + add_rigid_body_motion._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._translation_field) - self._rotation_field = Input(add_rigid_body_motion._spec().input_pin(2), 2, op, -1) + self._rotation_field = Input( + add_rigid_body_motion._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._rotation_field) - self._center_field = Input(add_rigid_body_motion._spec().input_pin(3), 3, op, -1) + self._center_field = Input( + add_rigid_body_motion._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._center_field) - self._mesh = Input(add_rigid_body_motion._spec().input_pin(7), 7, op, -1) + self._mesh = Input(add_rigid_body_motion._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) @property def displacement_field(self): - """Allows to connect displacement_field input to the operator + """Allows to connect displacement_field input to the operator. 
Parameters ---------- - my_displacement_field : Field, + my_displacement_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.add_rigid_body_motion() >>> op.inputs.displacement_field.connect(my_displacement_field) - >>> #or + >>> # or >>> op.inputs.displacement_field(my_displacement_field) - """ return self._displacement_field @property def translation_field(self): - """Allows to connect translation_field input to the operator + """Allows to connect translation_field input to the operator. Parameters ---------- - my_translation_field : Field, + my_translation_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.add_rigid_body_motion() >>> op.inputs.translation_field.connect(my_translation_field) - >>> #or + >>> # or >>> op.inputs.translation_field(my_translation_field) - """ return self._translation_field @property def rotation_field(self): - """Allows to connect rotation_field input to the operator + """Allows to connect rotation_field input to the operator. Parameters ---------- - my_rotation_field : Field, + my_rotation_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.add_rigid_body_motion() >>> op.inputs.rotation_field.connect(my_rotation_field) - >>> #or + >>> # or >>> op.inputs.rotation_field(my_rotation_field) - """ return self._rotation_field @property def center_field(self): - """Allows to connect center_field input to the operator + """Allows to connect center_field input to the operator. 
Parameters ---------- - my_center_field : Field, + my_center_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.add_rigid_body_motion() >>> op.inputs.center_field.connect(my_center_field) - >>> #or + >>> # or >>> op.inputs.center_field(my_center_field) - """ return self._center_field @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: default is the mesh in the support + Default is the mesh in the support Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.add_rigid_body_motion() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsAddRigidBodyMotion(_Outputs): - """Intermediate class used to get outputs from add_rigid_body_motion operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.add_rigid_body_motion() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + add_rigid_body_motion operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.add_rigid_body_motion() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(add_rigid_body_motion._spec().outputs, op) - self._field = Output(add_rigid_body_motion._spec().output_pin(0), 0, op) + self._field = Output(add_rigid_body_motion._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.add_rigid_body_motion() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/result/add_rigid_body_motion_fc.py b/ansys/dpf/core/operators/result/add_rigid_body_motion_fc.py index e95140b8bb2..abb9c7b920e 100644 --- a/ansys/dpf/core/operators/result/add_rigid_body_motion_fc.py +++ b/ansys/dpf/core/operators/result/add_rigid_body_motion_fc.py @@ -1,84 +1,153 @@ """ add_rigid_body_motion_fc -======================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class add_rigid_body_motion_fc(Operator): - """Adds a given rigid translation, center and rotation from a displacement field. The rotation is given in terms of rotations angles. 
Note that the displacement field has to be in the global coordinate sytem - - available inputs: - - fields_container (FieldsContainer) - - translation_field (Field) - - rotation_field (Field) - - center_field (Field) - - mesh (MeshedRegion) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.add_rigid_body_motion_fc() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_translation_field = dpf.Field() - >>> op.inputs.translation_field.connect(my_translation_field) - >>> my_rotation_field = dpf.Field() - >>> op.inputs.rotation_field.connect(my_rotation_field) - >>> my_center_field = dpf.Field() - >>> op.inputs.center_field.connect(my_center_field) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.add_rigid_body_motion_fc(fields_container=my_fields_container,translation_field=my_translation_field,rotation_field=my_rotation_field,center_field=my_center_field,mesh=my_mesh) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, translation_field=None, rotation_field=None, center_field=None, mesh=None, config=None, server=None): - super().__init__(name="RigidBodyAddition_fc", config = config, server = server) + """Adds a given rigid translation, center and rotation from a + displacement field. The rotation is given in terms of rotations + angles. 
Note that the displacement field has to be in the global + coordinate sytem + + Parameters + ---------- + fields_container : FieldsContainer + translation_field : Field + rotation_field : Field + center_field : Field + mesh : MeshedRegion, optional + Default is the mesh in the support + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.add_rigid_body_motion_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_translation_field = dpf.Field() + >>> op.inputs.translation_field.connect(my_translation_field) + >>> my_rotation_field = dpf.Field() + >>> op.inputs.rotation_field.connect(my_rotation_field) + >>> my_center_field = dpf.Field() + >>> op.inputs.center_field.connect(my_center_field) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.add_rigid_body_motion_fc( + ... fields_container=my_fields_container, + ... translation_field=my_translation_field, + ... rotation_field=my_rotation_field, + ... center_field=my_center_field, + ... mesh=my_mesh, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + fields_container=None, + translation_field=None, + rotation_field=None, + center_field=None, + mesh=None, + config=None, + server=None, + ): + super().__init__(name="RigidBodyAddition_fc", config=config, server=server) self._inputs = InputsAddRigidBodyMotionFc(self) self._outputs = OutputsAddRigidBodyMotionFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if translation_field !=None: + if translation_field is not None: self.inputs.translation_field.connect(translation_field) - if rotation_field !=None: + if rotation_field is not None: self.inputs.rotation_field.connect(rotation_field) - if center_field !=None: + if center_field is not None: self.inputs.center_field.connect(center_field) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Adds a given rigid translation, center and rotation from a displacement field. The rotation is given in terms of rotations angles. 
Note that the displacement field has to be in the global coordinate sytem""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "translation_field", type_names=["field"], optional=False, document=""""""), - 2 : PinSpecification(name = "rotation_field", type_names=["field"], optional=False, document=""""""), - 3 : PinSpecification(name = "center_field", type_names=["field"], optional=False, document=""""""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""default is the mesh in the support""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Adds a given rigid translation, center and rotation from a + displacement field. The rotation is given in terms of + rotations angles. Note that the displacement field has to + be in the global coordinate sytem""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="translation_field", + type_names=["field"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="rotation_field", + type_names=["field"], + optional=False, + document="""""", + ), + 3: PinSpecification( + name="center_field", + type_names=["field"], + optional=False, + document="""""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""Default is the mesh in the support""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return 
Operator.default_config(name = "RigidBodyAddition_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="RigidBodyAddition_fc", server=server) @property def inputs(self): @@ -86,189 +155,187 @@ def inputs(self): Returns -------- - inputs : InputsAddRigidBodyMotionFc + inputs : InputsAddRigidBodyMotionFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsAddRigidBodyMotionFc + outputs : OutputsAddRigidBodyMotionFc """ return super().outputs -#internal name: RigidBodyAddition_fc -#scripting name: add_rigid_body_motion_fc class InputsAddRigidBodyMotionFc(_Inputs): - """Intermediate class used to connect user inputs to add_rigid_body_motion_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.add_rigid_body_motion_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_translation_field = dpf.Field() - >>> op.inputs.translation_field.connect(my_translation_field) - >>> my_rotation_field = dpf.Field() - >>> op.inputs.rotation_field.connect(my_rotation_field) - >>> my_center_field = dpf.Field() - >>> op.inputs.center_field.connect(my_center_field) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + add_rigid_body_motion_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.add_rigid_body_motion_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_translation_field = dpf.Field() + >>> op.inputs.translation_field.connect(my_translation_field) + >>> my_rotation_field = dpf.Field() + >>> op.inputs.rotation_field.connect(my_rotation_field) + >>> my_center_field = dpf.Field() + >>> op.inputs.center_field.connect(my_center_field) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(add_rigid_body_motion_fc._spec().inputs, op) - self._fields_container = Input(add_rigid_body_motion_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + add_rigid_body_motion_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._translation_field = Input(add_rigid_body_motion_fc._spec().input_pin(1), 1, op, -1) + self._translation_field = Input( + add_rigid_body_motion_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._translation_field) - self._rotation_field = Input(add_rigid_body_motion_fc._spec().input_pin(2), 2, op, -1) + self._rotation_field = Input( + add_rigid_body_motion_fc._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._rotation_field) - self._center_field = Input(add_rigid_body_motion_fc._spec().input_pin(3), 3, op, -1) + self._center_field = Input( + add_rigid_body_motion_fc._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._center_field) - self._mesh = Input(add_rigid_body_motion_fc._spec().input_pin(7), 7, op, -1) + self._mesh = Input(add_rigid_body_motion_fc._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.add_rigid_body_motion_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def translation_field(self): - """Allows to connect translation_field input to the operator + """Allows to connect translation_field input to the operator. Parameters ---------- - my_translation_field : Field, + my_translation_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.add_rigid_body_motion_fc() >>> op.inputs.translation_field.connect(my_translation_field) - >>> #or + >>> # or >>> op.inputs.translation_field(my_translation_field) - """ return self._translation_field @property def rotation_field(self): - """Allows to connect rotation_field input to the operator + """Allows to connect rotation_field input to the operator. Parameters ---------- - my_rotation_field : Field, + my_rotation_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.add_rigid_body_motion_fc() >>> op.inputs.rotation_field.connect(my_rotation_field) - >>> #or + >>> # or >>> op.inputs.rotation_field(my_rotation_field) - """ return self._rotation_field @property def center_field(self): - """Allows to connect center_field input to the operator + """Allows to connect center_field input to the operator. 
Parameters ---------- - my_center_field : Field, + my_center_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.add_rigid_body_motion_fc() >>> op.inputs.center_field.connect(my_center_field) - >>> #or + >>> # or >>> op.inputs.center_field(my_center_field) - """ return self._center_field @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: default is the mesh in the support + Default is the mesh in the support Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.add_rigid_body_motion_fc() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsAddRigidBodyMotionFc(_Outputs): - """Intermediate class used to get outputs from add_rigid_body_motion_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.add_rigid_body_motion_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + add_rigid_body_motion_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.add_rigid_body_motion_fc() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(add_rigid_body_motion_fc._spec().outputs, op) - self._fields_container = Output(add_rigid_body_motion_fc._spec().output_pin(0), 0, op) + self._fields_container = Output( + add_rigid_body_motion_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.add_rigid_body_motion_fc() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/artificial_hourglass_energy.py b/ansys/dpf/core/operators/result/artificial_hourglass_energy.py index d09cbc92e6c..c209322218e 100644 --- a/ansys/dpf/core/operators/result/artificial_hourglass_energy.py +++ b/ansys/dpf/core/operators/result/artificial_hourglass_energy.py @@ -1,92 +1,238 @@ """ artificial_hourglass_energy -=========================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class artificial_hourglass_energy(Operator): - """Read/compute artificial hourglass energy by calling the readers defined by the datasources. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.artificial_hourglass_energy() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.artificial_hourglass_energy(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="ENG_AHO", config = config, server = server) + """Read/compute artificial hourglass energy by calling the readers + defined 
by the datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.artificial_hourglass_energy() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> 
my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.artificial_hourglass_energy( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="ENG_AHO", config=config, server=server) self._inputs = InputsArtificialHourglassEnergy(self) self._outputs = OutputsArtificialHourglassEnergy(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = 
Specification(description="""Read/compute artificial hourglass energy by calling the readers defined by the datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", 
type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute artificial hourglass energy by calling the readers + defined by the datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + 
document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ENG_AHO") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ENG_AHO", server=server) @property def inputs(self): @@ -94,275 +240,291 @@ def inputs(self): Returns -------- - inputs : InputsArtificialHourglassEnergy + inputs : InputsArtificialHourglassEnergy """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsArtificialHourglassEnergy + outputs : OutputsArtificialHourglassEnergy """ return super().outputs -#internal name: ENG_AHO -#scripting name: artificial_hourglass_energy class InputsArtificialHourglassEnergy(_Inputs): - """Intermediate class used to connect user inputs to artificial_hourglass_energy operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.artificial_hourglass_energy() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> 
op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + artificial_hourglass_energy operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.artificial_hourglass_energy() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(artificial_hourglass_energy._spec().inputs, op) - self._time_scoping = Input(artificial_hourglass_energy._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + artificial_hourglass_energy._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(artificial_hourglass_energy._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + 
artificial_hourglass_energy._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(artificial_hourglass_energy._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + artificial_hourglass_energy._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(artificial_hourglass_energy._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + artificial_hourglass_energy._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(artificial_hourglass_energy._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + artificial_hourglass_energy._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(artificial_hourglass_energy._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + artificial_hourglass_energy._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(artificial_hourglass_energy._spec().input_pin(7), 7, op, -1) + self._mesh = Input(artificial_hourglass_energy._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(artificial_hourglass_energy._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + artificial_hourglass_energy._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.artificial_hourglass_energy() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.artificial_hourglass_energy() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.artificial_hourglass_energy() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.artificial_hourglass_energy() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.artificial_hourglass_energy() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.artificial_hourglass_energy() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.artificial_hourglass_energy() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.artificial_hourglass_energy() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsArtificialHourglassEnergy(_Outputs): - """Intermediate class used to get outputs from artificial_hourglass_energy operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.artificial_hourglass_energy() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + artificial_hourglass_energy operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.artificial_hourglass_energy() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(artificial_hourglass_energy._spec().outputs, op) - self._fields_container = Output(artificial_hourglass_energy._spec().output_pin(0), 0, op) + self._fields_container = Output( + artificial_hourglass_energy._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.artificial_hourglass_energy() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/cms_matrices_provider.py b/ansys/dpf/core/operators/result/cms_matrices_provider.py index 3287fd2a599..42ed8199244 100644 --- a/ansys/dpf/core/operators/result/cms_matrices_provider.py +++ b/ansys/dpf/core/operators/result/cms_matrices_provider.py @@ -1,60 +1,96 @@ """ cms_matrices_provider -===================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class cms_matrices_provider(Operator): - """Read reducted matrices for cms elements. Extract stiffness, damping, mass matrices and load vector from a subfile. + """Read reducted matrices for cms elements. 
Extract stiffness, damping, + mass matrices and load vector from a subfile. + + Parameters + ---------- + data_sources : DataSources + Data_sources (must contain at list one + subfile). - available inputs: - - data_sources (DataSources) - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.result.cms_matrices_provider() - >>> # Instantiate operator - >>> op = dpf.operators.result.cms_matrices_provider() + >>> # Make input connections + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) - >>> # Make input connections - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.cms_matrices_provider( + ... data_sources=my_data_sources, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.cms_matrices_provider(data_sources=my_data_sources) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, data_sources=None, config=None, server=None): - super().__init__(name="cms_matrices_provider", config = config, server = server) + super().__init__(name="cms_matrices_provider", config=config, server=server) self._inputs = InputsCmsMatricesProvider(self) self._outputs = OutputsCmsMatricesProvider(self) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) @staticmethod def _spec(): - spec = Specification(description="""Read reducted matrices for cms elements. 
Extract stiffness, damping, mass matrices and load vector from a subfile.""", - map_input_pin_spec={ - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""Data_sources (must contain at list one subfile).""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""Fields container containing in this order : stiffness, damping, mass matrices, and then load vector.""")}) + description = """Read reducted matrices for cms elements. Extract stiffness, damping, + mass matrices and load vector from a subfile.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Data_sources (must contain at list one + subfile).""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Fields container containing in this order : + stiffness, damping, mass matrices, + and then load vector.""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "cms_matrices_provider") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="cms_matrices_provider", server=server) @property def inputs(self): @@ -62,95 +98,94 @@ def inputs(self): Returns -------- - inputs : InputsCmsMatricesProvider + inputs : InputsCmsMatricesProvider """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCmsMatricesProvider + outputs : OutputsCmsMatricesProvider """ return super().outputs -#internal name: cms_matrices_provider -#scripting name: cms_matrices_provider class InputsCmsMatricesProvider(_Inputs): - """Intermediate class used to connect user inputs to cms_matrices_provider operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.cms_matrices_provider() - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + """Intermediate class used to connect user inputs to + cms_matrices_provider operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.cms_matrices_provider() + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) """ + def __init__(self, op: Operator): super().__init__(cms_matrices_provider._spec().inputs, op) - self._data_sources = Input(cms_matrices_provider._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + cms_matrices_provider._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: Data_sources (must contain at list one subfile). + Data_sources (must contain at list one + subfile). 
Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cms_matrices_provider() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources + class OutputsCmsMatricesProvider(_Outputs): - """Intermediate class used to get outputs from cms_matrices_provider operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.cms_matrices_provider() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + cms_matrices_provider operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.cms_matrices_provider() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(cms_matrices_provider._spec().outputs, op) - self._fields_container = Output(cms_matrices_provider._spec().output_pin(0), 0, op) + self._fields_container = Output( + cms_matrices_provider._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: Fields container containing in this order : stiffness, damping, mass matrices, and then load vector. - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cms_matrices_provider() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/co_energy.py b/ansys/dpf/core/operators/result/co_energy.py index c21e6a78d7f..19dda30d4fd 100644 --- a/ansys/dpf/core/operators/result/co_energy.py +++ b/ansys/dpf/core/operators/result/co_energy.py @@ -1,92 +1,238 @@ """ co_energy -========= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class co_energy(Operator): - """Read/compute co-energy (magnetics) by calling the readers defined by the datasources. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.co_energy() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.co_energy(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="ENG_CO", config = config, server = server) + """Read/compute co-energy (magnetics) by calling the readers defined by + the datasources. 
+ + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.co_energy() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> 
op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.co_energy( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="ENG_CO", config=config, server=server) self._inputs = InputsCoEnergy(self) self._outputs = OutputsCoEnergy(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute co-energy (magnetics) by calling the readers defined by the 
datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute co-energy (magnetics) by calling 
the readers defined by + the datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done 
and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ENG_CO") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ENG_CO", server=server) @property def inputs(self): @@ -94,275 +240,275 @@ def inputs(self): Returns -------- - inputs : InputsCoEnergy + inputs : InputsCoEnergy """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCoEnergy + outputs : OutputsCoEnergy """ return super().outputs -#internal name: ENG_CO -#scripting name: co_energy class InputsCoEnergy(_Inputs): - """Intermediate class used to connect user inputs to co_energy operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.co_energy() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> 
my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + co_energy operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.co_energy() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(co_energy._spec().inputs, op) - self._time_scoping = Input(co_energy._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(co_energy._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(co_energy._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(co_energy._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(co_energy._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(co_energy._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(co_energy._spec().input_pin(3), 3, op, -1) + self._streams_container = 
Input(co_energy._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(co_energy._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(co_energy._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(co_energy._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input(co_energy._spec().input_pin(5), 5, op, -1) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(co_energy._spec().input_pin(7), 7, op, -1) + self._mesh = Input(co_energy._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(co_energy._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(co_energy._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.co_energy() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. 
Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.co_energy() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.co_energy() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.co_energy() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.co_energy() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.co_energy() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
- - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.co_energy() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.co_energy() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsCoEnergy(_Outputs): - """Intermediate class used to get outputs from co_energy operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.co_energy() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + co_energy operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.co_energy() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(co_energy._spec().outputs, op) - self._fields_container = Output(co_energy._spec().output_pin(0), 0, op) + self._fields_container = Output(co_energy._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.co_energy() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/compute_elastic_strain.py b/ansys/dpf/core/operators/result/compute_elastic_strain.py new file mode 100644 index 00000000000..8ea7647d893 --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_elastic_strain.py @@ -0,0 +1,595 @@ +"""Autogenerated DPF operator classes. + +Created on 12/06/2021, 14:29:21. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_elastic_strain(Operator): + """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, pyramid + and wedge). Layered elements are not supported. Not all strain + formulations are supported (only B-Bar). All coordinates are + global coordinates. 
+ + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. + data_sources : DataSources, optional + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + extrapolate : int, optional + Whether to extrapolate the data from the + integration points to the nodes. + nonlinear : int, optional + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). + abstract_meshed_region : MeshedRegion, optional + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. + requested_location : str, optional + Average the elemental nodal result to the + requested location. + displacement : FieldsContainer or Field, optional + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_elastic_strain() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_elastic_strain( + ... time_scoping=my_time_scoping, + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... extrapolate=my_extrapolate, + ... nonlinear=my_nonlinear, + ... abstract_meshed_region=my_abstract_meshed_region, + ... requested_location=my_requested_location, + ... displacement=my_displacement, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + scoping=None, + streams_container=None, + data_sources=None, + extrapolate=None, + nonlinear=None, + abstract_meshed_region=None, + requested_location=None, + displacement=None, + config=None, + server=None, + ): + super().__init__(name="compute_elastic_strain", config=config, server=server) + self._inputs = InputsComputeElasticStrain(self) + self._outputs = OutputsComputeElasticStrain(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if extrapolate is not None: + self.inputs.extrapolate.connect(extrapolate) + if nonlinear is not None: + self.inputs.nonlinear.connect(nonlinear) + if abstract_meshed_region is not None: + self.inputs.abstract_meshed_region.connect(abstract_meshed_region) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if displacement is not None: + self.inputs.displacement.connect(displacement) + + @staticmethod + def _spec(): + description = """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, + pyramid and wedge). Layered elements are not supported. + Not all strain formulations are supported (only B-Bar). 
+ All coordinates are global coordinates.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator).""", + ), + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected.""", + ), + 5: PinSpecification( + name="extrapolate", + type_names=["int32"], + optional=True, + document="""Whether to extrapolate the data from the + integration points to the nodes.""", + ), + 6: PinSpecification( + name="nonlinear", + type_names=["int32"], + optional=True, + document="""Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity).""", + ), + 7: PinSpecification( + name="abstract_meshed_region", + type_names=["abstract_meshed_region"], + optional=True, + document="""The underlying mesh. 
optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="displacement", + type_names=["fields_container", "field"], + optional=True, + document="""Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_elastic_strain", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeElasticStrain + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeElasticStrain + """ + return super().outputs + + +class InputsComputeElasticStrain(_Inputs): + """Intermediate class used to connect user inputs to + compute_elastic_strain operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + """ + + def __init__(self, op: Operator): + super().__init__(compute_elastic_strain._spec().inputs, op) + self._time_scoping = Input( + compute_elastic_strain._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._scoping = Input(compute_elastic_strain._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_elastic_strain._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + compute_elastic_strain._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._extrapolate = Input( + compute_elastic_strain._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._extrapolate) + self._nonlinear = Input(compute_elastic_strain._spec().input_pin(6), 6, op, -1) + self._inputs.append(self._nonlinear) + self._abstract_meshed_region = Input( + compute_elastic_strain._spec().input_pin(7), 7, op, -1 + ) + 
self._inputs.append(self._abstract_meshed_region) + self._requested_location = Input( + compute_elastic_strain._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._displacement = Input( + compute_elastic_strain._spec().input_pin(10), 10, op, -1 + ) + self._inputs.append(self._displacement) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def scoping(self): + """Allows to connect scoping input to the operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. 
+ + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def extrapolate(self): + """Allows to connect extrapolate input to the operator. + + Whether to extrapolate the data from the + integration points to the nodes. + + Parameters + ---------- + my_extrapolate : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> # or + >>> op.inputs.extrapolate(my_extrapolate) + """ + return self._extrapolate + + @property + def nonlinear(self): + """Allows to connect nonlinear input to the operator. + + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). 
+ + Parameters + ---------- + my_nonlinear : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> # or + >>> op.inputs.nonlinear(my_nonlinear) + """ + return self._nonlinear + + @property + def abstract_meshed_region(self): + """Allows to connect abstract_meshed_region input to the operator. + + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. + + Parameters + ---------- + my_abstract_meshed_region : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> # or + >>> op.inputs.abstract_meshed_region(my_abstract_meshed_region) + """ + return self._abstract_meshed_region + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def displacement(self): + """Allows to connect displacement input to the operator. + + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. 
+ + Parameters + ---------- + my_displacement : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain() + >>> op.inputs.displacement.connect(my_displacement) + >>> # or + >>> op.inputs.displacement(my_displacement) + """ + return self._displacement + + +class OutputsComputeElasticStrain(_Outputs): + """Intermediate class used to get outputs from + compute_elastic_strain operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_elastic_strain._spec().outputs, op) + self._fields_container = Output( + compute_elastic_strain._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_elastic_strain_1.py b/ansys/dpf/core/operators/result/compute_elastic_strain_1.py new file mode 100644 index 00000000000..6810b75316b --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_elastic_strain_1.py @@ -0,0 +1,598 @@ +"""Autogenerated DPF operator classes. + +Created on 12/06/2021, 14:29:22. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_elastic_strain_1(Operator): + """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, pyramid + and wedge). Layered elements are not supported. Not all strain + formulations are supported (only B-Bar). All coordinates are + global coordinates.Get the 1st principal component. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. + data_sources : DataSources, optional + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + extrapolate : int, optional + Whether to extrapolate the data from the + integration points to the nodes. + nonlinear : int, optional + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). + abstract_meshed_region : MeshedRegion, optional + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. 
+ requested_location : str, optional + Average the elemental nodal result to the + requested location. + displacement : FieldsContainer or Field, optional + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_elastic_strain_1() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_elastic_strain_1( + ... time_scoping=my_time_scoping, + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... extrapolate=my_extrapolate, + ... nonlinear=my_nonlinear, + ... abstract_meshed_region=my_abstract_meshed_region, + ... requested_location=my_requested_location, + ... displacement=my_displacement, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + scoping=None, + streams_container=None, + data_sources=None, + extrapolate=None, + nonlinear=None, + abstract_meshed_region=None, + requested_location=None, + displacement=None, + config=None, + server=None, + ): + super().__init__(name="compute_elastic_strain_1", config=config, server=server) + self._inputs = InputsComputeElasticStrain1(self) + self._outputs = OutputsComputeElasticStrain1(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if extrapolate is not None: + self.inputs.extrapolate.connect(extrapolate) + if nonlinear is not None: + self.inputs.nonlinear.connect(nonlinear) + if abstract_meshed_region is not None: + self.inputs.abstract_meshed_region.connect(abstract_meshed_region) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if displacement is not None: + self.inputs.displacement.connect(displacement) + + @staticmethod + def _spec(): + description = """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, + pyramid and wedge). Layered elements are not supported. + Not all strain formulations are supported (only B-Bar). 
+ All coordinates are global coordinates.Get the 1st + principal component.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator).""", + ), + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected.""", + ), + 5: PinSpecification( + name="extrapolate", + type_names=["int32"], + optional=True, + document="""Whether to extrapolate the data from the + integration points to the nodes.""", + ), + 6: PinSpecification( + name="nonlinear", + type_names=["int32"], + optional=True, + document="""Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity).""", + ), + 7: PinSpecification( + name="abstract_meshed_region", + type_names=["abstract_meshed_region"], + optional=True, + document="""The underlying mesh. 
optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="displacement", + type_names=["fields_container", "field"], + optional=True, + document="""Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_elastic_strain_1", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeElasticStrain1 + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeElasticStrain1 + """ + return super().outputs + + +class InputsComputeElasticStrain1(_Inputs): + """Intermediate class used to connect user inputs to + compute_elastic_strain_1 operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_1() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + """ + + def __init__(self, op: Operator): + super().__init__(compute_elastic_strain_1._spec().inputs, op) + self._time_scoping = Input( + compute_elastic_strain_1._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._scoping = Input(compute_elastic_strain_1._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_elastic_strain_1._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + compute_elastic_strain_1._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._extrapolate = Input( + compute_elastic_strain_1._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._extrapolate) + self._nonlinear = Input( + compute_elastic_strain_1._spec().input_pin(6), 6, op, -1 + ) + self._inputs.append(self._nonlinear) + self._abstract_meshed_region = Input( + compute_elastic_strain_1._spec().input_pin(7), 7, op, -1 + ) + 
self._inputs.append(self._abstract_meshed_region) + self._requested_location = Input( + compute_elastic_strain_1._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._displacement = Input( + compute_elastic_strain_1._spec().input_pin(10), 10, op, -1 + ) + self._inputs.append(self._displacement) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_1() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def scoping(self): + """Allows to connect scoping input to the operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_1() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. 
+ + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_1() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_1() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def extrapolate(self): + """Allows to connect extrapolate input to the operator. + + Whether to extrapolate the data from the + integration points to the nodes. + + Parameters + ---------- + my_extrapolate : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_1() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> # or + >>> op.inputs.extrapolate(my_extrapolate) + """ + return self._extrapolate + + @property + def nonlinear(self): + """Allows to connect nonlinear input to the operator. + + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). 
+ + Parameters + ---------- + my_nonlinear : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_1() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> # or + >>> op.inputs.nonlinear(my_nonlinear) + """ + return self._nonlinear + + @property + def abstract_meshed_region(self): + """Allows to connect abstract_meshed_region input to the operator. + + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. + + Parameters + ---------- + my_abstract_meshed_region : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_1() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> # or + >>> op.inputs.abstract_meshed_region(my_abstract_meshed_region) + """ + return self._abstract_meshed_region + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_1() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def displacement(self): + """Allows to connect displacement input to the operator. + + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. 
+ + Parameters + ---------- + my_displacement : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_1() + >>> op.inputs.displacement.connect(my_displacement) + >>> # or + >>> op.inputs.displacement(my_displacement) + """ + return self._displacement + + +class OutputsComputeElasticStrain1(_Outputs): + """Intermediate class used to get outputs from + compute_elastic_strain_1 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_1() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_elastic_strain_1._spec().outputs, op) + self._fields_container = Output( + compute_elastic_strain_1._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_1() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_elastic_strain_2.py b/ansys/dpf/core/operators/result/compute_elastic_strain_2.py new file mode 100644 index 00000000000..dab971126c5 --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_elastic_strain_2.py @@ -0,0 +1,598 @@ +"""Autogenerated DPF operator classes. + +Created on 12/06/2021, 14:29:22. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_elastic_strain_2(Operator): + """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, pyramid + and wedge). Layered elements are not supported. Not all strain + formulations are supported (only B-Bar). All coordinates are + global coordinates.Get the 2nd principal component. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. + data_sources : DataSources, optional + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + extrapolate : int, optional + Whether to extrapolate the data from the + integration points to the nodes. + nonlinear : int, optional + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). + abstract_meshed_region : MeshedRegion, optional + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. 
+ requested_location : str, optional + Average the elemental nodal result to the + requested location. + displacement : FieldsContainer or Field, optional + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_elastic_strain_2() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_elastic_strain_2( + ... time_scoping=my_time_scoping, + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... extrapolate=my_extrapolate, + ... nonlinear=my_nonlinear, + ... abstract_meshed_region=my_abstract_meshed_region, + ... requested_location=my_requested_location, + ... displacement=my_displacement, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + scoping=None, + streams_container=None, + data_sources=None, + extrapolate=None, + nonlinear=None, + abstract_meshed_region=None, + requested_location=None, + displacement=None, + config=None, + server=None, + ): + super().__init__(name="compute_elastic_strain_2", config=config, server=server) + self._inputs = InputsComputeElasticStrain2(self) + self._outputs = OutputsComputeElasticStrain2(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if extrapolate is not None: + self.inputs.extrapolate.connect(extrapolate) + if nonlinear is not None: + self.inputs.nonlinear.connect(nonlinear) + if abstract_meshed_region is not None: + self.inputs.abstract_meshed_region.connect(abstract_meshed_region) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if displacement is not None: + self.inputs.displacement.connect(displacement) + + @staticmethod + def _spec(): + description = """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, + pyramid and wedge). Layered elements are not supported. + Not all strain formulations are supported (only B-Bar). 
+ All coordinates are global coordinates.Get the 2nd + principal component.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator).""", + ), + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected.""", + ), + 5: PinSpecification( + name="extrapolate", + type_names=["int32"], + optional=True, + document="""Whether to extrapolate the data from the + integration points to the nodes.""", + ), + 6: PinSpecification( + name="nonlinear", + type_names=["int32"], + optional=True, + document="""Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity).""", + ), + 7: PinSpecification( + name="abstract_meshed_region", + type_names=["abstract_meshed_region"], + optional=True, + document="""The underlying mesh. 
optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="displacement", + type_names=["fields_container", "field"], + optional=True, + document="""Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_elastic_strain_2", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeElasticStrain2 + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeElasticStrain2 + """ + return super().outputs + + +class InputsComputeElasticStrain2(_Inputs): + """Intermediate class used to connect user inputs to + compute_elastic_strain_2 operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_2() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + """ + + def __init__(self, op: Operator): + super().__init__(compute_elastic_strain_2._spec().inputs, op) + self._time_scoping = Input( + compute_elastic_strain_2._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._scoping = Input(compute_elastic_strain_2._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_elastic_strain_2._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + compute_elastic_strain_2._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._extrapolate = Input( + compute_elastic_strain_2._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._extrapolate) + self._nonlinear = Input( + compute_elastic_strain_2._spec().input_pin(6), 6, op, -1 + ) + self._inputs.append(self._nonlinear) + self._abstract_meshed_region = Input( + compute_elastic_strain_2._spec().input_pin(7), 7, op, -1 + ) + 
self._inputs.append(self._abstract_meshed_region) + self._requested_location = Input( + compute_elastic_strain_2._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._displacement = Input( + compute_elastic_strain_2._spec().input_pin(10), 10, op, -1 + ) + self._inputs.append(self._displacement) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_2() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def scoping(self): + """Allows to connect scoping input to the operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_2() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. 
+ + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_2() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_2() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def extrapolate(self): + """Allows to connect extrapolate input to the operator. + + Whether to extrapolate the data from the + integration points to the nodes. + + Parameters + ---------- + my_extrapolate : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_2() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> # or + >>> op.inputs.extrapolate(my_extrapolate) + """ + return self._extrapolate + + @property + def nonlinear(self): + """Allows to connect nonlinear input to the operator. + + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). 
+ + Parameters + ---------- + my_nonlinear : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_2() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> # or + >>> op.inputs.nonlinear(my_nonlinear) + """ + return self._nonlinear + + @property + def abstract_meshed_region(self): + """Allows to connect abstract_meshed_region input to the operator. + + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. + + Parameters + ---------- + my_abstract_meshed_region : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_2() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> # or + >>> op.inputs.abstract_meshed_region(my_abstract_meshed_region) + """ + return self._abstract_meshed_region + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_2() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def displacement(self): + """Allows to connect displacement input to the operator. + + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. 
+ + Parameters + ---------- + my_displacement : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_2() + >>> op.inputs.displacement.connect(my_displacement) + >>> # or + >>> op.inputs.displacement(my_displacement) + """ + return self._displacement + + +class OutputsComputeElasticStrain2(_Outputs): + """Intermediate class used to get outputs from + compute_elastic_strain_2 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_2() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_elastic_strain_2._spec().outputs, op) + self._fields_container = Output( + compute_elastic_strain_2._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_2() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_elastic_strain_3.py b/ansys/dpf/core/operators/result/compute_elastic_strain_3.py new file mode 100644 index 00000000000..17be341cf28 --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_elastic_strain_3.py @@ -0,0 +1,598 @@ +"""Autogenerated DPF operator classes. + +Created on 12/06/2021, 14:29:20. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_elastic_strain_3(Operator): + """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, pyramid + and wedge). Layered elements are not supported. Not all strain + formulations are supported (only B-Bar). All coordinates are + global coordinates.Get the 3rd principal component. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. + data_sources : DataSources, optional + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + extrapolate : int, optional + Whether to extrapolate the data from the + integration points to the nodes. + nonlinear : int, optional + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). + abstract_meshed_region : MeshedRegion, optional + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. 
+ requested_location : str, optional + Average the elemental nodal result to the + requested location. + displacement : FieldsContainer or Field, optional + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_elastic_strain_3() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_elastic_strain_3( + ... time_scoping=my_time_scoping, + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... extrapolate=my_extrapolate, + ... nonlinear=my_nonlinear, + ... abstract_meshed_region=my_abstract_meshed_region, + ... requested_location=my_requested_location, + ... displacement=my_displacement, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + scoping=None, + streams_container=None, + data_sources=None, + extrapolate=None, + nonlinear=None, + abstract_meshed_region=None, + requested_location=None, + displacement=None, + config=None, + server=None, + ): + super().__init__(name="compute_elastic_strain_3", config=config, server=server) + self._inputs = InputsComputeElasticStrain3(self) + self._outputs = OutputsComputeElasticStrain3(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if extrapolate is not None: + self.inputs.extrapolate.connect(extrapolate) + if nonlinear is not None: + self.inputs.nonlinear.connect(nonlinear) + if abstract_meshed_region is not None: + self.inputs.abstract_meshed_region.connect(abstract_meshed_region) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if displacement is not None: + self.inputs.displacement.connect(displacement) + + @staticmethod + def _spec(): + description = """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, + pyramid and wedge). Layered elements are not supported. + Not all strain formulations are supported (only B-Bar). 
+ All coordinates are global coordinates.Get the 3rd + principal component.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator).""", + ), + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected.""", + ), + 5: PinSpecification( + name="extrapolate", + type_names=["int32"], + optional=True, + document="""Whether to extrapolate the data from the + integration points to the nodes.""", + ), + 6: PinSpecification( + name="nonlinear", + type_names=["int32"], + optional=True, + document="""Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity).""", + ), + 7: PinSpecification( + name="abstract_meshed_region", + type_names=["abstract_meshed_region"], + optional=True, + document="""The underlying mesh. 
optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="displacement", + type_names=["fields_container", "field"], + optional=True, + document="""Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_elastic_strain_3", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeElasticStrain3 + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeElasticStrain3 + """ + return super().outputs + + +class InputsComputeElasticStrain3(_Inputs): + """Intermediate class used to connect user inputs to + compute_elastic_strain_3 operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_3() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + """ + + def __init__(self, op: Operator): + super().__init__(compute_elastic_strain_3._spec().inputs, op) + self._time_scoping = Input( + compute_elastic_strain_3._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._scoping = Input(compute_elastic_strain_3._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_elastic_strain_3._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + compute_elastic_strain_3._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._extrapolate = Input( + compute_elastic_strain_3._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._extrapolate) + self._nonlinear = Input( + compute_elastic_strain_3._spec().input_pin(6), 6, op, -1 + ) + self._inputs.append(self._nonlinear) + self._abstract_meshed_region = Input( + compute_elastic_strain_3._spec().input_pin(7), 7, op, -1 + ) + 
self._inputs.append(self._abstract_meshed_region) + self._requested_location = Input( + compute_elastic_strain_3._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._displacement = Input( + compute_elastic_strain_3._spec().input_pin(10), 10, op, -1 + ) + self._inputs.append(self._displacement) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_3() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def scoping(self): + """Allows to connect scoping input to the operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_3() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. 
+ + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_3() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_3() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def extrapolate(self): + """Allows to connect extrapolate input to the operator. + + Whether to extrapolate the data from the + integration points to the nodes. + + Parameters + ---------- + my_extrapolate : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_3() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> # or + >>> op.inputs.extrapolate(my_extrapolate) + """ + return self._extrapolate + + @property + def nonlinear(self): + """Allows to connect nonlinear input to the operator. + + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). 
+ + Parameters + ---------- + my_nonlinear : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_3() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> # or + >>> op.inputs.nonlinear(my_nonlinear) + """ + return self._nonlinear + + @property + def abstract_meshed_region(self): + """Allows to connect abstract_meshed_region input to the operator. + + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. + + Parameters + ---------- + my_abstract_meshed_region : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_3() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> # or + >>> op.inputs.abstract_meshed_region(my_abstract_meshed_region) + """ + return self._abstract_meshed_region + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_3() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def displacement(self): + """Allows to connect displacement input to the operator. + + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. 
+ + Parameters + ---------- + my_displacement : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_3() + >>> op.inputs.displacement.connect(my_displacement) + >>> # or + >>> op.inputs.displacement(my_displacement) + """ + return self._displacement + + +class OutputsComputeElasticStrain3(_Outputs): + """Intermediate class used to get outputs from + compute_elastic_strain_3 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_3() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_elastic_strain_3._spec().outputs, op) + self._fields_container = Output( + compute_elastic_strain_3._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_3() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_elastic_strain_X.py b/ansys/dpf/core/operators/result/compute_elastic_strain_X.py new file mode 100644 index 00000000000..a65d4392312 --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_elastic_strain_X.py @@ -0,0 +1,598 @@ +"""Autogenerated DPF operator classes. + +Created on 12/06/2021, 14:29:21. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_elastic_strain_X(Operator): + """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, pyramid + and wedge). Layered elements are not supported. Not all strain + formulations are supported (only B-Bar). All coordinates are + global coordinates.Get the XX normal component (00 component). + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. + data_sources : DataSources, optional + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + extrapolate : int, optional + Whether to extrapolate the data from the + integration points to the nodes. + nonlinear : int, optional + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). + abstract_meshed_region : MeshedRegion, optional + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. 
+ requested_location : str, optional + Average the elemental nodal result to the + requested location. + displacement : FieldsContainer or Field, optional + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_elastic_strain_X() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_elastic_strain_X( + ... time_scoping=my_time_scoping, + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... extrapolate=my_extrapolate, + ... nonlinear=my_nonlinear, + ... abstract_meshed_region=my_abstract_meshed_region, + ... requested_location=my_requested_location, + ... displacement=my_displacement, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + scoping=None, + streams_container=None, + data_sources=None, + extrapolate=None, + nonlinear=None, + abstract_meshed_region=None, + requested_location=None, + displacement=None, + config=None, + server=None, + ): + super().__init__(name="compute_elastic_strain_X", config=config, server=server) + self._inputs = InputsComputeElasticStrainX(self) + self._outputs = OutputsComputeElasticStrainX(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if extrapolate is not None: + self.inputs.extrapolate.connect(extrapolate) + if nonlinear is not None: + self.inputs.nonlinear.connect(nonlinear) + if abstract_meshed_region is not None: + self.inputs.abstract_meshed_region.connect(abstract_meshed_region) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if displacement is not None: + self.inputs.displacement.connect(displacement) + + @staticmethod + def _spec(): + description = """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, + pyramid and wedge). Layered elements are not supported. + Not all strain formulations are supported (only B-Bar). 
+ All coordinates are global coordinates.Get the XX normal + component (00 component).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator).""", + ), + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected.""", + ), + 5: PinSpecification( + name="extrapolate", + type_names=["int32"], + optional=True, + document="""Whether to extrapolate the data from the + integration points to the nodes.""", + ), + 6: PinSpecification( + name="nonlinear", + type_names=["int32"], + optional=True, + document="""Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity).""", + ), + 7: PinSpecification( + name="abstract_meshed_region", + type_names=["abstract_meshed_region"], + optional=True, + document="""The underlying mesh. 
optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="displacement", + type_names=["fields_container", "field"], + optional=True, + document="""Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_elastic_strain_X", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeElasticStrainX + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeElasticStrainX + """ + return super().outputs + + +class InputsComputeElasticStrainX(_Inputs): + """Intermediate class used to connect user inputs to + compute_elastic_strain_X operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_X() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + """ + + def __init__(self, op: Operator): + super().__init__(compute_elastic_strain_X._spec().inputs, op) + self._time_scoping = Input( + compute_elastic_strain_X._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._scoping = Input(compute_elastic_strain_X._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_elastic_strain_X._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + compute_elastic_strain_X._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._extrapolate = Input( + compute_elastic_strain_X._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._extrapolate) + self._nonlinear = Input( + compute_elastic_strain_X._spec().input_pin(6), 6, op, -1 + ) + self._inputs.append(self._nonlinear) + self._abstract_meshed_region = Input( + compute_elastic_strain_X._spec().input_pin(7), 7, op, -1 + ) + 
self._inputs.append(self._abstract_meshed_region) + self._requested_location = Input( + compute_elastic_strain_X._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._displacement = Input( + compute_elastic_strain_X._spec().input_pin(10), 10, op, -1 + ) + self._inputs.append(self._displacement) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_X() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def scoping(self): + """Allows to connect scoping input to the operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_X() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. 
+ + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_X() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_X() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def extrapolate(self): + """Allows to connect extrapolate input to the operator. + + Whether to extrapolate the data from the + integration points to the nodes. + + Parameters + ---------- + my_extrapolate : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_X() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> # or + >>> op.inputs.extrapolate(my_extrapolate) + """ + return self._extrapolate + + @property + def nonlinear(self): + """Allows to connect nonlinear input to the operator. + + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). 
+ + Parameters + ---------- + my_nonlinear : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_X() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> # or + >>> op.inputs.nonlinear(my_nonlinear) + """ + return self._nonlinear + + @property + def abstract_meshed_region(self): + """Allows to connect abstract_meshed_region input to the operator. + + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. + + Parameters + ---------- + my_abstract_meshed_region : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_X() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> # or + >>> op.inputs.abstract_meshed_region(my_abstract_meshed_region) + """ + return self._abstract_meshed_region + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_X() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def displacement(self): + """Allows to connect displacement input to the operator. + + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. 
+ + Parameters + ---------- + my_displacement : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_X() + >>> op.inputs.displacement.connect(my_displacement) + >>> # or + >>> op.inputs.displacement(my_displacement) + """ + return self._displacement + + +class OutputsComputeElasticStrainX(_Outputs): + """Intermediate class used to get outputs from + compute_elastic_strain_X operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_X() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_elastic_strain_X._spec().outputs, op) + self._fields_container = Output( + compute_elastic_strain_X._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_X() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_elastic_strain_XY.py b/ansys/dpf/core/operators/result/compute_elastic_strain_XY.py new file mode 100644 index 00000000000..b1c5b0dfe80 --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_elastic_strain_XY.py @@ -0,0 +1,598 @@ +"""Autogenerated DPF operator classes. + +Created on 12/06/2021, 14:29:21. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_elastic_strain_XY(Operator): + """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, pyramid + and wedge). Layered elements are not supported. Not all strain + formulations are supported (only B-Bar). All coordinates are + global coordinates.Get the XY shear component (01 component). + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. + data_sources : DataSources, optional + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + extrapolate : int, optional + Whether to extrapolate the data from the + integration points to the nodes. + nonlinear : int, optional + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). + abstract_meshed_region : MeshedRegion, optional + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. 
+ requested_location : str, optional + Average the elemental nodal result to the + requested location. + displacement : FieldsContainer or Field, optional + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_elastic_strain_XY() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_elastic_strain_XY( + ... time_scoping=my_time_scoping, + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... extrapolate=my_extrapolate, + ... nonlinear=my_nonlinear, + ... abstract_meshed_region=my_abstract_meshed_region, + ... requested_location=my_requested_location, + ... displacement=my_displacement, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + scoping=None, + streams_container=None, + data_sources=None, + extrapolate=None, + nonlinear=None, + abstract_meshed_region=None, + requested_location=None, + displacement=None, + config=None, + server=None, + ): + super().__init__(name="compute_elastic_strain_XY", config=config, server=server) + self._inputs = InputsComputeElasticStrainXy(self) + self._outputs = OutputsComputeElasticStrainXy(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if extrapolate is not None: + self.inputs.extrapolate.connect(extrapolate) + if nonlinear is not None: + self.inputs.nonlinear.connect(nonlinear) + if abstract_meshed_region is not None: + self.inputs.abstract_meshed_region.connect(abstract_meshed_region) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if displacement is not None: + self.inputs.displacement.connect(displacement) + + @staticmethod + def _spec(): + description = """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, + pyramid and wedge). Layered elements are not supported. + Not all strain formulations are supported (only B-Bar). 
+ All coordinates are global coordinates.Get the XY shear + component (01 component).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator).""", + ), + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected.""", + ), + 5: PinSpecification( + name="extrapolate", + type_names=["int32"], + optional=True, + document="""Whether to extrapolate the data from the + integration points to the nodes.""", + ), + 6: PinSpecification( + name="nonlinear", + type_names=["int32"], + optional=True, + document="""Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity).""", + ), + 7: PinSpecification( + name="abstract_meshed_region", + type_names=["abstract_meshed_region"], + optional=True, + document="""The underlying mesh. 
optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="displacement", + type_names=["fields_container", "field"], + optional=True, + document="""Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_elastic_strain_XY", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeElasticStrainXy + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeElasticStrainXy + """ + return super().outputs + + +class InputsComputeElasticStrainXy(_Inputs): + """Intermediate class used to connect user inputs to + compute_elastic_strain_XY operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_XY() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + """ + + def __init__(self, op: Operator): + super().__init__(compute_elastic_strain_XY._spec().inputs, op) + self._time_scoping = Input( + compute_elastic_strain_XY._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._scoping = Input(compute_elastic_strain_XY._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_elastic_strain_XY._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + compute_elastic_strain_XY._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._extrapolate = Input( + compute_elastic_strain_XY._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._extrapolate) + self._nonlinear = Input( + compute_elastic_strain_XY._spec().input_pin(6), 6, op, -1 + ) + self._inputs.append(self._nonlinear) + self._abstract_meshed_region = Input( + compute_elastic_strain_XY._spec().input_pin(7), 7, op, 
-1 + ) + self._inputs.append(self._abstract_meshed_region) + self._requested_location = Input( + compute_elastic_strain_XY._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._displacement = Input( + compute_elastic_strain_XY._spec().input_pin(10), 10, op, -1 + ) + self._inputs.append(self._displacement) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_XY() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def scoping(self): + """Allows to connect scoping input to the operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_XY() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. 
+ + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_XY() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_XY() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def extrapolate(self): + """Allows to connect extrapolate input to the operator. + + Whether to extrapolate the data from the + integration points to the nodes. + + Parameters + ---------- + my_extrapolate : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_XY() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> # or + >>> op.inputs.extrapolate(my_extrapolate) + """ + return self._extrapolate + + @property + def nonlinear(self): + """Allows to connect nonlinear input to the operator. + + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). 
+ + Parameters + ---------- + my_nonlinear : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_XY() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> # or + >>> op.inputs.nonlinear(my_nonlinear) + """ + return self._nonlinear + + @property + def abstract_meshed_region(self): + """Allows to connect abstract_meshed_region input to the operator. + + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. + + Parameters + ---------- + my_abstract_meshed_region : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_XY() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> # or + >>> op.inputs.abstract_meshed_region(my_abstract_meshed_region) + """ + return self._abstract_meshed_region + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_XY() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def displacement(self): + """Allows to connect displacement input to the operator. + + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. 
+ + Parameters + ---------- + my_displacement : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_XY() + >>> op.inputs.displacement.connect(my_displacement) + >>> # or + >>> op.inputs.displacement(my_displacement) + """ + return self._displacement + + +class OutputsComputeElasticStrainXy(_Outputs): + """Intermediate class used to get outputs from + compute_elastic_strain_XY operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_XY() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_elastic_strain_XY._spec().outputs, op) + self._fields_container = Output( + compute_elastic_strain_XY._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_XY() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_elastic_strain_XZ.py b/ansys/dpf/core/operators/result/compute_elastic_strain_XZ.py new file mode 100644 index 00000000000..bfb0f11f83f --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_elastic_strain_XZ.py @@ -0,0 +1,598 @@ +"""Autogenerated DPF operator classes. + +Created on 12/06/2021, 14:29:21. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_elastic_strain_XZ(Operator): + """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, pyramid + and wedge). Layered elements are not supported. Not all strain + formulations are supported (only B-Bar). All coordinates are + global coordinates.Get the XZ shear component (02 component). + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. + data_sources : DataSources, optional + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + extrapolate : int, optional + Whether to extrapolate the data from the + integration points to the nodes. + nonlinear : int, optional + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). + abstract_meshed_region : MeshedRegion, optional + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. 
+ requested_location : str, optional + Average the elemental nodal result to the + requested location. + displacement : FieldsContainer or Field, optional + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_elastic_strain_XZ() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_elastic_strain_XZ( + ... time_scoping=my_time_scoping, + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... extrapolate=my_extrapolate, + ... nonlinear=my_nonlinear, + ... abstract_meshed_region=my_abstract_meshed_region, + ... requested_location=my_requested_location, + ... displacement=my_displacement, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + scoping=None, + streams_container=None, + data_sources=None, + extrapolate=None, + nonlinear=None, + abstract_meshed_region=None, + requested_location=None, + displacement=None, + config=None, + server=None, + ): + super().__init__(name="compute_elastic_strain_XZ", config=config, server=server) + self._inputs = InputsComputeElasticStrainXz(self) + self._outputs = OutputsComputeElasticStrainXz(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if extrapolate is not None: + self.inputs.extrapolate.connect(extrapolate) + if nonlinear is not None: + self.inputs.nonlinear.connect(nonlinear) + if abstract_meshed_region is not None: + self.inputs.abstract_meshed_region.connect(abstract_meshed_region) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if displacement is not None: + self.inputs.displacement.connect(displacement) + + @staticmethod + def _spec(): + description = """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, + pyramid and wedge). Layered elements are not supported. + Not all strain formulations are supported (only B-Bar). 
+ All coordinates are global coordinates.Get the XZ shear + component (02 component).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator).""", + ), + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected.""", + ), + 5: PinSpecification( + name="extrapolate", + type_names=["int32"], + optional=True, + document="""Whether to extrapolate the data from the + integration points to the nodes.""", + ), + 6: PinSpecification( + name="nonlinear", + type_names=["int32"], + optional=True, + document="""Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity).""", + ), + 7: PinSpecification( + name="abstract_meshed_region", + type_names=["abstract_meshed_region"], + optional=True, + document="""The underlying mesh. 
optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="displacement", + type_names=["fields_container", "field"], + optional=True, + document="""Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_elastic_strain_XZ", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeElasticStrainXz + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeElasticStrainXz + """ + return super().outputs + + +class InputsComputeElasticStrainXz(_Inputs): + """Intermediate class used to connect user inputs to + compute_elastic_strain_XZ operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_XZ() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + """ + + def __init__(self, op: Operator): + super().__init__(compute_elastic_strain_XZ._spec().inputs, op) + self._time_scoping = Input( + compute_elastic_strain_XZ._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._scoping = Input(compute_elastic_strain_XZ._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_elastic_strain_XZ._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + compute_elastic_strain_XZ._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._extrapolate = Input( + compute_elastic_strain_XZ._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._extrapolate) + self._nonlinear = Input( + compute_elastic_strain_XZ._spec().input_pin(6), 6, op, -1 + ) + self._inputs.append(self._nonlinear) + self._abstract_meshed_region = Input( + compute_elastic_strain_XZ._spec().input_pin(7), 7, op, 
-1 + ) + self._inputs.append(self._abstract_meshed_region) + self._requested_location = Input( + compute_elastic_strain_XZ._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._displacement = Input( + compute_elastic_strain_XZ._spec().input_pin(10), 10, op, -1 + ) + self._inputs.append(self._displacement) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_XZ() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def scoping(self): + """Allows to connect scoping input to the operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_XZ() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. 
+ + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_XZ() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_XZ() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def extrapolate(self): + """Allows to connect extrapolate input to the operator. + + Whether to extrapolate the data from the + integration points to the nodes. + + Parameters + ---------- + my_extrapolate : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_XZ() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> # or + >>> op.inputs.extrapolate(my_extrapolate) + """ + return self._extrapolate + + @property + def nonlinear(self): + """Allows to connect nonlinear input to the operator. + + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). 
+ + Parameters + ---------- + my_nonlinear : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_XZ() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> # or + >>> op.inputs.nonlinear(my_nonlinear) + """ + return self._nonlinear + + @property + def abstract_meshed_region(self): + """Allows to connect abstract_meshed_region input to the operator. + + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. + + Parameters + ---------- + my_abstract_meshed_region : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_XZ() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> # or + >>> op.inputs.abstract_meshed_region(my_abstract_meshed_region) + """ + return self._abstract_meshed_region + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_XZ() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def displacement(self): + """Allows to connect displacement input to the operator. + + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. 
+ + Parameters + ---------- + my_displacement : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_XZ() + >>> op.inputs.displacement.connect(my_displacement) + >>> # or + >>> op.inputs.displacement(my_displacement) + """ + return self._displacement + + +class OutputsComputeElasticStrainXz(_Outputs): + """Intermediate class used to get outputs from + compute_elastic_strain_XZ operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_XZ() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_elastic_strain_XZ._spec().outputs, op) + self._fields_container = Output( + compute_elastic_strain_XZ._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_XZ() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_elastic_strain_Y.py b/ansys/dpf/core/operators/result/compute_elastic_strain_Y.py new file mode 100644 index 00000000000..506f7890861 --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_elastic_strain_Y.py @@ -0,0 +1,598 @@ +"""Autogenerated DPF operator classes. + +Created on 12/06/2021, 14:29:18. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_elastic_strain_Y(Operator): + """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, pyramid + and wedge). Layered elements are not supported. Not all strain + formulations are supported (only B-Bar). All coordinates are + global coordinates.Get the YY normal component (11 component). + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. + data_sources : DataSources, optional + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + extrapolate : int, optional + Whether to extrapolate the data from the + integration points to the nodes. + nonlinear : int, optional + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). + abstract_meshed_region : MeshedRegion, optional + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. 
+ requested_location : str, optional + Average the elemental nodal result to the + requested location. + displacement : FieldsContainer or Field, optional + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_elastic_strain_Y() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_elastic_strain_Y( + ... time_scoping=my_time_scoping, + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... extrapolate=my_extrapolate, + ... nonlinear=my_nonlinear, + ... abstract_meshed_region=my_abstract_meshed_region, + ... requested_location=my_requested_location, + ... displacement=my_displacement, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + scoping=None, + streams_container=None, + data_sources=None, + extrapolate=None, + nonlinear=None, + abstract_meshed_region=None, + requested_location=None, + displacement=None, + config=None, + server=None, + ): + super().__init__(name="compute_elastic_strain_Y", config=config, server=server) + self._inputs = InputsComputeElasticStrainY(self) + self._outputs = OutputsComputeElasticStrainY(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if extrapolate is not None: + self.inputs.extrapolate.connect(extrapolate) + if nonlinear is not None: + self.inputs.nonlinear.connect(nonlinear) + if abstract_meshed_region is not None: + self.inputs.abstract_meshed_region.connect(abstract_meshed_region) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if displacement is not None: + self.inputs.displacement.connect(displacement) + + @staticmethod + def _spec(): + description = """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, + pyramid and wedge). Layered elements are not supported. + Not all strain formulations are supported (only B-Bar). 
+ All coordinates are global coordinates.Get the YY normal + component (11 component).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator).""", + ), + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected.""", + ), + 5: PinSpecification( + name="extrapolate", + type_names=["int32"], + optional=True, + document="""Whether to extrapolate the data from the + integration points to the nodes.""", + ), + 6: PinSpecification( + name="nonlinear", + type_names=["int32"], + optional=True, + document="""Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity).""", + ), + 7: PinSpecification( + name="abstract_meshed_region", + type_names=["abstract_meshed_region"], + optional=True, + document="""The underlying mesh. 
optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="displacement", + type_names=["fields_container", "field"], + optional=True, + document="""Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_elastic_strain_Y", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeElasticStrainY + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeElasticStrainY + """ + return super().outputs + + +class InputsComputeElasticStrainY(_Inputs): + """Intermediate class used to connect user inputs to + compute_elastic_strain_Y operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_Y() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + """ + + def __init__(self, op: Operator): + super().__init__(compute_elastic_strain_Y._spec().inputs, op) + self._time_scoping = Input( + compute_elastic_strain_Y._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._scoping = Input(compute_elastic_strain_Y._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_elastic_strain_Y._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + compute_elastic_strain_Y._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._extrapolate = Input( + compute_elastic_strain_Y._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._extrapolate) + self._nonlinear = Input( + compute_elastic_strain_Y._spec().input_pin(6), 6, op, -1 + ) + self._inputs.append(self._nonlinear) + self._abstract_meshed_region = Input( + compute_elastic_strain_Y._spec().input_pin(7), 7, op, -1 + ) + 
self._inputs.append(self._abstract_meshed_region) + self._requested_location = Input( + compute_elastic_strain_Y._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._displacement = Input( + compute_elastic_strain_Y._spec().input_pin(10), 10, op, -1 + ) + self._inputs.append(self._displacement) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_Y() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def scoping(self): + """Allows to connect scoping input to the operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_Y() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. 
+ + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_Y() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_Y() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def extrapolate(self): + """Allows to connect extrapolate input to the operator. + + Whether to extrapolate the data from the + integration points to the nodes. + + Parameters + ---------- + my_extrapolate : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_Y() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> # or + >>> op.inputs.extrapolate(my_extrapolate) + """ + return self._extrapolate + + @property + def nonlinear(self): + """Allows to connect nonlinear input to the operator. + + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). 
+ + Parameters + ---------- + my_nonlinear : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_Y() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> # or + >>> op.inputs.nonlinear(my_nonlinear) + """ + return self._nonlinear + + @property + def abstract_meshed_region(self): + """Allows to connect abstract_meshed_region input to the operator. + + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. + + Parameters + ---------- + my_abstract_meshed_region : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_Y() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> # or + >>> op.inputs.abstract_meshed_region(my_abstract_meshed_region) + """ + return self._abstract_meshed_region + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_Y() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def displacement(self): + """Allows to connect displacement input to the operator. + + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. 
+ + Parameters + ---------- + my_displacement : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_Y() + >>> op.inputs.displacement.connect(my_displacement) + >>> # or + >>> op.inputs.displacement(my_displacement) + """ + return self._displacement + + +class OutputsComputeElasticStrainY(_Outputs): + """Intermediate class used to get outputs from + compute_elastic_strain_Y operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_Y() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_elastic_strain_Y._spec().outputs, op) + self._fields_container = Output( + compute_elastic_strain_Y._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_Y() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_elastic_strain_YZ.py b/ansys/dpf/core/operators/result/compute_elastic_strain_YZ.py new file mode 100644 index 00000000000..497cdd0b099 --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_elastic_strain_YZ.py @@ -0,0 +1,598 @@ +"""Autogenerated DPF operator classes. + +Created on 12/06/2021, 14:29:20. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_elastic_strain_YZ(Operator): + """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, pyramid + and wedge). Layered elements are not supported. Not all strain + formulations are supported (only B-Bar). All coordinates are + global coordinates.Get the YZ shear component (12 component). + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. + data_sources : DataSources, optional + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + extrapolate : int, optional + Whether to extrapolate the data from the + integration points to the nodes. + nonlinear : int, optional + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). + abstract_meshed_region : MeshedRegion, optional + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. 
+ requested_location : str, optional + Average the elemental nodal result to the + requested location. + displacement : FieldsContainer or Field, optional + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_elastic_strain_YZ() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_elastic_strain_YZ( + ... time_scoping=my_time_scoping, + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... extrapolate=my_extrapolate, + ... nonlinear=my_nonlinear, + ... abstract_meshed_region=my_abstract_meshed_region, + ... requested_location=my_requested_location, + ... displacement=my_displacement, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + scoping=None, + streams_container=None, + data_sources=None, + extrapolate=None, + nonlinear=None, + abstract_meshed_region=None, + requested_location=None, + displacement=None, + config=None, + server=None, + ): + super().__init__(name="compute_elastic_strain_YZ", config=config, server=server) + self._inputs = InputsComputeElasticStrainYz(self) + self._outputs = OutputsComputeElasticStrainYz(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if extrapolate is not None: + self.inputs.extrapolate.connect(extrapolate) + if nonlinear is not None: + self.inputs.nonlinear.connect(nonlinear) + if abstract_meshed_region is not None: + self.inputs.abstract_meshed_region.connect(abstract_meshed_region) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if displacement is not None: + self.inputs.displacement.connect(displacement) + + @staticmethod + def _spec(): + description = """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, + pyramid and wedge). Layered elements are not supported. + Not all strain formulations are supported (only B-Bar). 
+ All coordinates are global coordinates.Get the YZ shear + component (12 component).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator).""", + ), + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected.""", + ), + 5: PinSpecification( + name="extrapolate", + type_names=["int32"], + optional=True, + document="""Whether to extrapolate the data from the + integration points to the nodes.""", + ), + 6: PinSpecification( + name="nonlinear", + type_names=["int32"], + optional=True, + document="""Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity).""", + ), + 7: PinSpecification( + name="abstract_meshed_region", + type_names=["abstract_meshed_region"], + optional=True, + document="""The underlying mesh. 
optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="displacement", + type_names=["fields_container", "field"], + optional=True, + document="""Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_elastic_strain_YZ", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeElasticStrainYz + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeElasticStrainYz + """ + return super().outputs + + +class InputsComputeElasticStrainYz(_Inputs): + """Intermediate class used to connect user inputs to + compute_elastic_strain_YZ operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_YZ() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + """ + + def __init__(self, op: Operator): + super().__init__(compute_elastic_strain_YZ._spec().inputs, op) + self._time_scoping = Input( + compute_elastic_strain_YZ._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._scoping = Input(compute_elastic_strain_YZ._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_elastic_strain_YZ._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + compute_elastic_strain_YZ._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._extrapolate = Input( + compute_elastic_strain_YZ._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._extrapolate) + self._nonlinear = Input( + compute_elastic_strain_YZ._spec().input_pin(6), 6, op, -1 + ) + self._inputs.append(self._nonlinear) + self._abstract_meshed_region = Input( + compute_elastic_strain_YZ._spec().input_pin(7), 7, op, 
-1 + ) + self._inputs.append(self._abstract_meshed_region) + self._requested_location = Input( + compute_elastic_strain_YZ._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._displacement = Input( + compute_elastic_strain_YZ._spec().input_pin(10), 10, op, -1 + ) + self._inputs.append(self._displacement) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_YZ() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def scoping(self): + """Allows to connect scoping input to the operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_YZ() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. 
+ + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_YZ() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_YZ() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def extrapolate(self): + """Allows to connect extrapolate input to the operator. + + Whether to extrapolate the data from the + integration points to the nodes. + + Parameters + ---------- + my_extrapolate : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_YZ() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> # or + >>> op.inputs.extrapolate(my_extrapolate) + """ + return self._extrapolate + + @property + def nonlinear(self): + """Allows to connect nonlinear input to the operator. + + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). 
+ + Parameters + ---------- + my_nonlinear : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_YZ() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> # or + >>> op.inputs.nonlinear(my_nonlinear) + """ + return self._nonlinear + + @property + def abstract_meshed_region(self): + """Allows to connect abstract_meshed_region input to the operator. + + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. + + Parameters + ---------- + my_abstract_meshed_region : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_YZ() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> # or + >>> op.inputs.abstract_meshed_region(my_abstract_meshed_region) + """ + return self._abstract_meshed_region + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_YZ() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def displacement(self): + """Allows to connect displacement input to the operator. + + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. 
+ + Parameters + ---------- + my_displacement : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_YZ() + >>> op.inputs.displacement.connect(my_displacement) + >>> # or + >>> op.inputs.displacement(my_displacement) + """ + return self._displacement + + +class OutputsComputeElasticStrainYz(_Outputs): + """Intermediate class used to get outputs from + compute_elastic_strain_YZ operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_YZ() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_elastic_strain_YZ._spec().outputs, op) + self._fields_container = Output( + compute_elastic_strain_YZ._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_YZ() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_elastic_strain_Z.py b/ansys/dpf/core/operators/result/compute_elastic_strain_Z.py new file mode 100644 index 00000000000..e79921b1bf8 --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_elastic_strain_Z.py @@ -0,0 +1,598 @@ +"""Autogenerated DPF operator classes. + +Created on 12/06/2021, 14:29:21. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_elastic_strain_Z(Operator): + """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, pyramid + and wedge). Layered elements are not supported. Not all strain + formulations are supported (only B-Bar). All coordinates are + global coordinates.Get the ZZ normal component (22 component). + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. + data_sources : DataSources, optional + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + extrapolate : int, optional + Whether to extrapolate the data from the + integration points to the nodes. + nonlinear : int, optional + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). + abstract_meshed_region : MeshedRegion, optional + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. 
+ requested_location : str, optional + Average the elemental nodal result to the + requested location. + displacement : FieldsContainer or Field, optional + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_elastic_strain_Z() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_elastic_strain_Z( + ... time_scoping=my_time_scoping, + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... extrapolate=my_extrapolate, + ... nonlinear=my_nonlinear, + ... abstract_meshed_region=my_abstract_meshed_region, + ... requested_location=my_requested_location, + ... displacement=my_displacement, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + scoping=None, + streams_container=None, + data_sources=None, + extrapolate=None, + nonlinear=None, + abstract_meshed_region=None, + requested_location=None, + displacement=None, + config=None, + server=None, + ): + super().__init__(name="compute_elastic_strain_Z", config=config, server=server) + self._inputs = InputsComputeElasticStrainZ(self) + self._outputs = OutputsComputeElasticStrainZ(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if extrapolate is not None: + self.inputs.extrapolate.connect(extrapolate) + if nonlinear is not None: + self.inputs.nonlinear.connect(nonlinear) + if abstract_meshed_region is not None: + self.inputs.abstract_meshed_region.connect(abstract_meshed_region) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if displacement is not None: + self.inputs.displacement.connect(displacement) + + @staticmethod + def _spec(): + description = """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, + pyramid and wedge). Layered elements are not supported. + Not all strain formulations are supported (only B-Bar). 
+ All coordinates are global coordinates.Get the ZZ normal + component (22 component).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator).""", + ), + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected.""", + ), + 5: PinSpecification( + name="extrapolate", + type_names=["int32"], + optional=True, + document="""Whether to extrapolate the data from the + integration points to the nodes.""", + ), + 6: PinSpecification( + name="nonlinear", + type_names=["int32"], + optional=True, + document="""Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity).""", + ), + 7: PinSpecification( + name="abstract_meshed_region", + type_names=["abstract_meshed_region"], + optional=True, + document="""The underlying mesh. 
optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="displacement", + type_names=["fields_container", "field"], + optional=True, + document="""Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_elastic_strain_Z", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeElasticStrainZ + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeElasticStrainZ + """ + return super().outputs + + +class InputsComputeElasticStrainZ(_Inputs): + """Intermediate class used to connect user inputs to + compute_elastic_strain_Z operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_Z() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + """ + + def __init__(self, op: Operator): + super().__init__(compute_elastic_strain_Z._spec().inputs, op) + self._time_scoping = Input( + compute_elastic_strain_Z._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._scoping = Input(compute_elastic_strain_Z._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_elastic_strain_Z._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + compute_elastic_strain_Z._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._extrapolate = Input( + compute_elastic_strain_Z._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._extrapolate) + self._nonlinear = Input( + compute_elastic_strain_Z._spec().input_pin(6), 6, op, -1 + ) + self._inputs.append(self._nonlinear) + self._abstract_meshed_region = Input( + compute_elastic_strain_Z._spec().input_pin(7), 7, op, -1 + ) + 
self._inputs.append(self._abstract_meshed_region) + self._requested_location = Input( + compute_elastic_strain_Z._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._displacement = Input( + compute_elastic_strain_Z._spec().input_pin(10), 10, op, -1 + ) + self._inputs.append(self._displacement) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_Z() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def scoping(self): + """Allows to connect scoping input to the operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_Z() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. 
+ + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_Z() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_Z() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def extrapolate(self): + """Allows to connect extrapolate input to the operator. + + Whether to extrapolate the data from the + integration points to the nodes. + + Parameters + ---------- + my_extrapolate : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_Z() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> # or + >>> op.inputs.extrapolate(my_extrapolate) + """ + return self._extrapolate + + @property + def nonlinear(self): + """Allows to connect nonlinear input to the operator. + + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). 
+ + Parameters + ---------- + my_nonlinear : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_Z() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> # or + >>> op.inputs.nonlinear(my_nonlinear) + """ + return self._nonlinear + + @property + def abstract_meshed_region(self): + """Allows to connect abstract_meshed_region input to the operator. + + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. + + Parameters + ---------- + my_abstract_meshed_region : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_Z() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> # or + >>> op.inputs.abstract_meshed_region(my_abstract_meshed_region) + """ + return self._abstract_meshed_region + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_Z() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def displacement(self): + """Allows to connect displacement input to the operator. + + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. 
+ + Parameters + ---------- + my_displacement : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_Z() + >>> op.inputs.displacement.connect(my_displacement) + >>> # or + >>> op.inputs.displacement(my_displacement) + """ + return self._displacement + + +class OutputsComputeElasticStrainZ(_Outputs): + """Intermediate class used to get outputs from + compute_elastic_strain_Z operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_Z() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_elastic_strain_Z._spec().outputs, op) + self._fields_container = Output( + compute_elastic_strain_Z._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_elastic_strain_Z() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_stress.py b/ansys/dpf/core/operators/result/compute_stress.py new file mode 100644 index 00000000000..2e91008181e --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_stress.py @@ -0,0 +1,370 @@ +""" +compute_stress +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_stress(Operator): + """Computes the stress from an elastic strain field.Only some 3-D + elements (only hexa, tetra, pyramid and wedge) and integration + schemes are supported. Only isotropic materials are supported. + Material nonlinearity is not supported. Only constant materials + are supported. All coordinates are global coordinates. All units + need to be consistent. + + Parameters + ---------- + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Needed to get mesh and material ids. optional + if a data_sources have been + connected. + data_sources : DataSources, optional + Needed to get mesh and material ids. optional + if a streams_container have been + connected. + requested_location : str, optional + Average the elemental nodal result to the + requested location. + strain : FieldsContainer or Field + Field/or fields container containing only the + elastic strain field (element nodal). 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_stress() + + >>> # Make input connections + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_strain = dpf.FieldsContainer() + >>> op.inputs.strain.connect(my_strain) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_stress( + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... requested_location=my_requested_location, + ... strain=my_strain, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + scoping=None, + streams_container=None, + data_sources=None, + requested_location=None, + strain=None, + config=None, + server=None, + ): + super().__init__(name="compute_stress", config=config, server=server) + self._inputs = InputsComputeStress(self) + self._outputs = OutputsComputeStress(self) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if strain is not None: + self.inputs.strain.connect(strain) + + @staticmethod + def _spec(): + description = """Computes the stress from an elastic strain field.Only some 3-D + elements (only hexa, tetra, pyramid and wedge) and + integration schemes are supported. 
Only isotropic + materials are supported. Material nonlinearity is not + supported. Only constant materials are supported. All + coordinates are global coordinates. All units need to be + consistent.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Needed to get mesh and material ids. optional + if a data_sources have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Needed to get mesh and material ids. optional + if a streams_container have been + connected.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="strain", + type_names=["fields_container", "field"], + optional=False, + document="""Field/or fields container containing only the + elastic strain field (element nodal).""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="compute_stress", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeStress + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeStress + """ + return super().outputs + + +class InputsComputeStress(_Inputs): + """Intermediate class used to connect user inputs to + compute_stress operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress() + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_strain = dpf.FieldsContainer() + >>> op.inputs.strain.connect(my_strain) + """ + + def __init__(self, op: Operator): + super().__init__(compute_stress._spec().inputs, op) + self._scoping = Input(compute_stress._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input(compute_stress._spec().input_pin(3), 3, op, -1) + self._inputs.append(self._streams_container) + self._data_sources = Input(compute_stress._spec().input_pin(4), 4, op, -1) + self._inputs.append(self._data_sources) + self._requested_location = Input(compute_stress._spec().input_pin(9), 9, op, -1) + self._inputs.append(self._requested_location) + self._strain = Input(compute_stress._spec().input_pin(10), 10, op, -1) + self._inputs.append(self._strain) + + @property + def scoping(self): + """Allows to connect scoping input to the operator. + + The element scoping on which the result is + computed. 
+ + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Needed to get mesh and material ids. optional + if a data_sources have been + connected. + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Needed to get mesh and material ids. optional + if a streams_container have been + connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def strain(self): + """Allows to connect strain input to the operator. 
+ + Field/or fields container containing only the + elastic strain field (element nodal). + + Parameters + ---------- + my_strain : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress() + >>> op.inputs.strain.connect(my_strain) + >>> # or + >>> op.inputs.strain(my_strain) + """ + return self._strain + + +class OutputsComputeStress(_Outputs): + """Intermediate class used to get outputs from + compute_stress operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_stress._spec().outputs, op) + self._fields_container = Output(compute_stress._spec().output_pin(0), 0, op) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_stress_1.py b/ansys/dpf/core/operators/result/compute_stress_1.py new file mode 100644 index 00000000000..8d26447991c --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_stress_1.py @@ -0,0 +1,374 @@ +""" +compute_stress_1 +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_stress_1(Operator): + """Computes the stress from an elastic strain field.Only some 3-D + elements (only hexa, tetra, pyramid and wedge) and integration + schemes are supported. Only isotropic materials are supported. + Material nonlinearity is not supported. Only constant materials + are supported. All coordinates are global coordinates. All units + need to be consistent.Get the 1st principal component. + + Parameters + ---------- + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Needed to get mesh and material ids. optional + if a data_sources have been + connected. + data_sources : DataSources, optional + Needed to get mesh and material ids. optional + if a streams_container have been + connected. + requested_location : str, optional + Average the elemental nodal result to the + requested location. + strain : FieldsContainer or Field + Field/or fields container containing only the + elastic strain field (element nodal). 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_stress_1() + + >>> # Make input connections + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_strain = dpf.FieldsContainer() + >>> op.inputs.strain.connect(my_strain) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_stress_1( + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... requested_location=my_requested_location, + ... strain=my_strain, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + scoping=None, + streams_container=None, + data_sources=None, + requested_location=None, + strain=None, + config=None, + server=None, + ): + super().__init__(name="compute_stress_1", config=config, server=server) + self._inputs = InputsComputeStress1(self) + self._outputs = OutputsComputeStress1(self) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if strain is not None: + self.inputs.strain.connect(strain) + + @staticmethod + def _spec(): + description = """Computes the stress from an elastic strain field.Only some 3-D + elements (only hexa, tetra, pyramid and wedge) and + integration schemes are supported. 
Only isotropic + materials are supported. Material nonlinearity is not + supported. Only constant materials are supported. All + coordinates are global coordinates. All units need to be + consistent.Get the 1st principal component.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Needed to get mesh and material ids. optional + if a data_sources have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Needed to get mesh and material ids. optional + if a streams_container have been + connected.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="strain", + type_names=["fields_container", "field"], + optional=False, + document="""Field/or fields container containing only the + elastic strain field (element nodal).""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. 
When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_stress_1", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeStress1 + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeStress1 + """ + return super().outputs + + +class InputsComputeStress1(_Inputs): + """Intermediate class used to connect user inputs to + compute_stress_1 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_1() + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_strain = dpf.FieldsContainer() + >>> op.inputs.strain.connect(my_strain) + """ + + def __init__(self, op: Operator): + super().__init__(compute_stress_1._spec().inputs, op) + self._scoping = Input(compute_stress_1._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_stress_1._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input(compute_stress_1._spec().input_pin(4), 4, op, -1) + self._inputs.append(self._data_sources) + self._requested_location = Input( + compute_stress_1._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._strain = Input(compute_stress_1._spec().input_pin(10), 10, op, -1) + self._inputs.append(self._strain) + + @property + def scoping(self): + """Allows to connect scoping input to the 
operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_1() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Needed to get mesh and material ids. optional + if a data_sources have been + connected. + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_1() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Needed to get mesh and material ids. optional + if a streams_container have been + connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_1() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. 
+ + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_1() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def strain(self): + """Allows to connect strain input to the operator. + + Field/or fields container containing only the + elastic strain field (element nodal). + + Parameters + ---------- + my_strain : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_1() + >>> op.inputs.strain.connect(my_strain) + >>> # or + >>> op.inputs.strain(my_strain) + """ + return self._strain + + +class OutputsComputeStress1(_Outputs): + """Intermediate class used to get outputs from + compute_stress_1 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_1() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_stress_1._spec().outputs, op) + self._fields_container = Output(compute_stress_1._spec().output_pin(0), 0, op) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_1() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_stress_2.py b/ansys/dpf/core/operators/result/compute_stress_2.py new file mode 100644 index 00000000000..41859567c6a --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_stress_2.py @@ -0,0 +1,374 @@ +""" +compute_stress_2 +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_stress_2(Operator): + """Computes the stress from an elastic strain field.Only some 3-D + elements (only hexa, tetra, pyramid and wedge) and integration + schemes are supported. Only isotropic materials are supported. + Material nonlinearity is not supported. Only constant materials + are supported. All coordinates are global coordinates. All units + need to be consistent.Get the 2nd principal component. + + Parameters + ---------- + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Needed to get mesh and material ids. optional + if a data_sources have been + connected. + data_sources : DataSources, optional + Needed to get mesh and material ids. optional + if a streams_container have been + connected. + requested_location : str, optional + Average the elemental nodal result to the + requested location. + strain : FieldsContainer or Field + Field/or fields container containing only the + elastic strain field (element nodal). 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_stress_2() + + >>> # Make input connections + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_strain = dpf.FieldsContainer() + >>> op.inputs.strain.connect(my_strain) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_stress_2( + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... requested_location=my_requested_location, + ... strain=my_strain, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + scoping=None, + streams_container=None, + data_sources=None, + requested_location=None, + strain=None, + config=None, + server=None, + ): + super().__init__(name="compute_stress_2", config=config, server=server) + self._inputs = InputsComputeStress2(self) + self._outputs = OutputsComputeStress2(self) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if strain is not None: + self.inputs.strain.connect(strain) + + @staticmethod + def _spec(): + description = """Computes the stress from an elastic strain field.Only some 3-D + elements (only hexa, tetra, pyramid and wedge) and + integration schemes are supported. 
Only isotropic + materials are supported. Material nonlinearity is not + supported. Only constant materials are supported. All + coordinates are global coordinates. All units need to be + consistent.Get the 2nd principal component.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Needed to get mesh and material ids. optional + if a data_sources have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Needed to get mesh and material ids. optional + if a streams_container have been + connected.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="strain", + type_names=["fields_container", "field"], + optional=False, + document="""Field/or fields container containing only the + elastic strain field (element nodal).""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. 
When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_stress_2", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeStress2 + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeStress2 + """ + return super().outputs + + +class InputsComputeStress2(_Inputs): + """Intermediate class used to connect user inputs to + compute_stress_2 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_2() + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_strain = dpf.FieldsContainer() + >>> op.inputs.strain.connect(my_strain) + """ + + def __init__(self, op: Operator): + super().__init__(compute_stress_2._spec().inputs, op) + self._scoping = Input(compute_stress_2._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_stress_2._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input(compute_stress_2._spec().input_pin(4), 4, op, -1) + self._inputs.append(self._data_sources) + self._requested_location = Input( + compute_stress_2._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._strain = Input(compute_stress_2._spec().input_pin(10), 10, op, -1) + self._inputs.append(self._strain) + + @property + def scoping(self): + """Allows to connect scoping input to the 
operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_2() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Needed to get mesh and material ids. optional + if a data_sources have been + connected. + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_2() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Needed to get mesh and material ids. optional + if a streams_container have been + connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_2() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. 
+ + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_2() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def strain(self): + """Allows to connect strain input to the operator. + + Field/or fields container containing only the + elastic strain field (element nodal). + + Parameters + ---------- + my_strain : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_2() + >>> op.inputs.strain.connect(my_strain) + >>> # or + >>> op.inputs.strain(my_strain) + """ + return self._strain + + +class OutputsComputeStress2(_Outputs): + """Intermediate class used to get outputs from + compute_stress_2 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_2() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_stress_2._spec().outputs, op) + self._fields_container = Output(compute_stress_2._spec().output_pin(0), 0, op) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_2() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_stress_3.py b/ansys/dpf/core/operators/result/compute_stress_3.py new file mode 100644 index 00000000000..717a8af8fba --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_stress_3.py @@ -0,0 +1,374 @@ +""" +compute_stress_3 +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_stress_3(Operator): + """Computes the stress from an elastic strain field.Only some 3-D + elements (only hexa, tetra, pyramid and wedge) and integration + schemes are supported. Only isotropic materials are supported. + Material nonlinearity is not supported. Only constant materials + are supported. All coordinates are global coordinates. All units + need to be consistent.Get the 3rd principal component. + + Parameters + ---------- + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Needed to get mesh and material ids. optional + if a data_sources have been + connected. + data_sources : DataSources, optional + Needed to get mesh and material ids. optional + if a streams_container have been + connected. + requested_location : str, optional + Average the elemental nodal result to the + requested location. + strain : FieldsContainer or Field + Field/or fields container containing only the + elastic strain field (element nodal). 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_stress_3() + + >>> # Make input connections + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_strain = dpf.FieldsContainer() + >>> op.inputs.strain.connect(my_strain) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_stress_3( + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... requested_location=my_requested_location, + ... strain=my_strain, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + scoping=None, + streams_container=None, + data_sources=None, + requested_location=None, + strain=None, + config=None, + server=None, + ): + super().__init__(name="compute_stress_3", config=config, server=server) + self._inputs = InputsComputeStress3(self) + self._outputs = OutputsComputeStress3(self) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if strain is not None: + self.inputs.strain.connect(strain) + + @staticmethod + def _spec(): + description = """Computes the stress from an elastic strain field.Only some 3-D + elements (only hexa, tetra, pyramid and wedge) and + integration schemes are supported. 
Only isotropic + materials are supported. Material nonlinearity is not + supported. Only constant materials are supported. All + coordinates are global coordinates. All units need to be + consistent.Get the 3rd principal component.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Needed to get mesh and material ids. optional + if a data_sources have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Needed to get mesh and material ids. optional + if a streams_container have been + connected.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="strain", + type_names=["fields_container", "field"], + optional=False, + document="""Field/or fields container containing only the + elastic strain field (element nodal).""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. 
When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_stress_3", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeStress3 + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeStress3 + """ + return super().outputs + + +class InputsComputeStress3(_Inputs): + """Intermediate class used to connect user inputs to + compute_stress_3 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_3() + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_strain = dpf.FieldsContainer() + >>> op.inputs.strain.connect(my_strain) + """ + + def __init__(self, op: Operator): + super().__init__(compute_stress_3._spec().inputs, op) + self._scoping = Input(compute_stress_3._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_stress_3._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input(compute_stress_3._spec().input_pin(4), 4, op, -1) + self._inputs.append(self._data_sources) + self._requested_location = Input( + compute_stress_3._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._strain = Input(compute_stress_3._spec().input_pin(10), 10, op, -1) + self._inputs.append(self._strain) + + @property + def scoping(self): + """Allows to connect scoping input to the 
operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_3() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Needed to get mesh and material ids. optional + if a data_sources have been + connected. + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_3() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Needed to get mesh and material ids. optional + if a streams_container have been + connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_3() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. 
+ + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_3() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def strain(self): + """Allows to connect strain input to the operator. + + Field/or fields container containing only the + elastic strain field (element nodal). + + Parameters + ---------- + my_strain : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_3() + >>> op.inputs.strain.connect(my_strain) + >>> # or + >>> op.inputs.strain(my_strain) + """ + return self._strain + + +class OutputsComputeStress3(_Outputs): + """Intermediate class used to get outputs from + compute_stress_3 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_3() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_stress_3._spec().outputs, op) + self._fields_container = Output(compute_stress_3._spec().output_pin(0), 0, op) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_3() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_stress_X.py b/ansys/dpf/core/operators/result/compute_stress_X.py new file mode 100644 index 00000000000..8e5e378d96a --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_stress_X.py @@ -0,0 +1,374 @@ +""" +compute_stress_X +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_stress_X(Operator): + """Computes the stress from an elastic strain field.Only some 3-D + elements (only hexa, tetra, pyramid and wedge) and integration + schemes are supported. Only isotropic materials are supported. + Material nonlinearity is not supported. Only constant materials + are supported. All coordinates are global coordinates. All units + need to be consistent.Get the XX normal component (00 component). + + Parameters + ---------- + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Needed to get mesh and material ids. optional + if a data_sources have been + connected. + data_sources : DataSources, optional + Needed to get mesh and material ids. optional + if a streams_container have been + connected. + requested_location : str, optional + Average the elemental nodal result to the + requested location. + strain : FieldsContainer or Field + Field/or fields container containing only the + elastic strain field (element nodal). 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_stress_X() + + >>> # Make input connections + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_strain = dpf.FieldsContainer() + >>> op.inputs.strain.connect(my_strain) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_stress_X( + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... requested_location=my_requested_location, + ... strain=my_strain, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + scoping=None, + streams_container=None, + data_sources=None, + requested_location=None, + strain=None, + config=None, + server=None, + ): + super().__init__(name="compute_stress_X", config=config, server=server) + self._inputs = InputsComputeStressX(self) + self._outputs = OutputsComputeStressX(self) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if strain is not None: + self.inputs.strain.connect(strain) + + @staticmethod + def _spec(): + description = """Computes the stress from an elastic strain field.Only some 3-D + elements (only hexa, tetra, pyramid and wedge) and + integration schemes are supported. 
Only isotropic + materials are supported. Material nonlinearity is not + supported. Only constant materials are supported. All + coordinates are global coordinates. All units need to be + consistent.Get the XX normal component (00 component).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Needed to get mesh and material ids. optional + if a data_sources have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Needed to get mesh and material ids. optional + if a streams_container have been + connected.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="strain", + type_names=["fields_container", "field"], + optional=False, + document="""Field/or fields container containing only the + elastic strain field (element nodal).""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. 
When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_stress_X", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeStressX + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeStressX + """ + return super().outputs + + +class InputsComputeStressX(_Inputs): + """Intermediate class used to connect user inputs to + compute_stress_X operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_X() + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_strain = dpf.FieldsContainer() + >>> op.inputs.strain.connect(my_strain) + """ + + def __init__(self, op: Operator): + super().__init__(compute_stress_X._spec().inputs, op) + self._scoping = Input(compute_stress_X._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_stress_X._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input(compute_stress_X._spec().input_pin(4), 4, op, -1) + self._inputs.append(self._data_sources) + self._requested_location = Input( + compute_stress_X._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._strain = Input(compute_stress_X._spec().input_pin(10), 10, op, -1) + self._inputs.append(self._strain) + + @property + def scoping(self): + """Allows to connect scoping input to the 
operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_X() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Needed to get mesh and material ids. optional + if a data_sources have been + connected. + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_X() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Needed to get mesh and material ids. optional + if a streams_container have been + connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_X() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. 
+ + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_X() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def strain(self): + """Allows to connect strain input to the operator. + + Field/or fields container containing only the + elastic strain field (element nodal). + + Parameters + ---------- + my_strain : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_X() + >>> op.inputs.strain.connect(my_strain) + >>> # or + >>> op.inputs.strain(my_strain) + """ + return self._strain + + +class OutputsComputeStressX(_Outputs): + """Intermediate class used to get outputs from + compute_stress_X operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_X() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_stress_X._spec().outputs, op) + self._fields_container = Output(compute_stress_X._spec().output_pin(0), 0, op) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_X() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_stress_XY.py b/ansys/dpf/core/operators/result/compute_stress_XY.py new file mode 100644 index 00000000000..3a19e6fe7aa --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_stress_XY.py @@ -0,0 +1,374 @@ +""" +compute_stress_XY +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_stress_XY(Operator): + """Computes the stress from an elastic strain field.Only some 3-D + elements (only hexa, tetra, pyramid and wedge) and integration + schemes are supported. Only isotropic materials are supported. + Material nonlinearity is not supported. Only constant materials + are supported. All coordinates are global coordinates. All units + need to be consistent.Get the XY shear component (01 component). + + Parameters + ---------- + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Needed to get mesh and material ids. optional + if a data_sources have been + connected. + data_sources : DataSources, optional + Needed to get mesh and material ids. optional + if a streams_container have been + connected. + requested_location : str, optional + Average the elemental nodal result to the + requested location. + strain : FieldsContainer or Field + Field/or fields container containing only the + elastic strain field (element nodal). 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_stress_XY() + + >>> # Make input connections + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_strain = dpf.FieldsContainer() + >>> op.inputs.strain.connect(my_strain) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_stress_XY( + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... requested_location=my_requested_location, + ... strain=my_strain, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + scoping=None, + streams_container=None, + data_sources=None, + requested_location=None, + strain=None, + config=None, + server=None, + ): + super().__init__(name="compute_stress_XY", config=config, server=server) + self._inputs = InputsComputeStressXy(self) + self._outputs = OutputsComputeStressXy(self) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if strain is not None: + self.inputs.strain.connect(strain) + + @staticmethod + def _spec(): + description = """Computes the stress from an elastic strain field.Only some 3-D + elements (only hexa, tetra, pyramid and wedge) and + integration schemes are supported. 
Only isotropic + materials are supported. Material nonlinearity is not + supported. Only constant materials are supported. All + coordinates are global coordinates. All units need to be + consistent.Get the XY shear component (01 component).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Needed to get mesh and material ids. optional + if a data_sources have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Needed to get mesh and material ids. optional + if a streams_container have been + connected.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="strain", + type_names=["fields_container", "field"], + optional=False, + document="""Field/or fields container containing only the + elastic strain field (element nodal).""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. 
When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_stress_XY", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeStressXy + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeStressXy + """ + return super().outputs + + +class InputsComputeStressXy(_Inputs): + """Intermediate class used to connect user inputs to + compute_stress_XY operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_XY() + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_strain = dpf.FieldsContainer() + >>> op.inputs.strain.connect(my_strain) + """ + + def __init__(self, op: Operator): + super().__init__(compute_stress_XY._spec().inputs, op) + self._scoping = Input(compute_stress_XY._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_stress_XY._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input(compute_stress_XY._spec().input_pin(4), 4, op, -1) + self._inputs.append(self._data_sources) + self._requested_location = Input( + compute_stress_XY._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._strain = Input(compute_stress_XY._spec().input_pin(10), 10, op, -1) + self._inputs.append(self._strain) + + @property + def scoping(self): + """Allows to connect scoping 
input to the operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_XY() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Needed to get mesh and material ids. optional + if a data_sources have been + connected. + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_XY() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Needed to get mesh and material ids. optional + if a streams_container have been + connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_XY() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. 
+ + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_XY() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def strain(self): + """Allows to connect strain input to the operator. + + Field/or fields container containing only the + elastic strain field (element nodal). + + Parameters + ---------- + my_strain : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_XY() + >>> op.inputs.strain.connect(my_strain) + >>> # or + >>> op.inputs.strain(my_strain) + """ + return self._strain + + +class OutputsComputeStressXy(_Outputs): + """Intermediate class used to get outputs from + compute_stress_XY operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_XY() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_stress_XY._spec().outputs, op) + self._fields_container = Output(compute_stress_XY._spec().output_pin(0), 0, op) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_XY() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_stress_XZ.py b/ansys/dpf/core/operators/result/compute_stress_XZ.py new file mode 100644 index 00000000000..b2b593eb200 --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_stress_XZ.py @@ -0,0 +1,374 @@ +""" +compute_stress_XZ +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_stress_XZ(Operator): + """Computes the stress from an elastic strain field.Only some 3-D + elements (only hexa, tetra, pyramid and wedge) and integration + schemes are supported. Only isotropic materials are supported. + Material nonlinearity is not supported. Only constant materials + are supported. All coordinates are global coordinates. All units + need to be consistent.Get the XZ shear component (02 component). + + Parameters + ---------- + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Needed to get mesh and material ids. optional + if a data_sources have been + connected. + data_sources : DataSources, optional + Needed to get mesh and material ids. optional + if a streams_container have been + connected. + requested_location : str, optional + Average the elemental nodal result to the + requested location. + strain : FieldsContainer or Field + Field/or fields container containing only the + elastic strain field (element nodal). 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_stress_XZ() + + >>> # Make input connections + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_strain = dpf.FieldsContainer() + >>> op.inputs.strain.connect(my_strain) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_stress_XZ( + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... requested_location=my_requested_location, + ... strain=my_strain, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + scoping=None, + streams_container=None, + data_sources=None, + requested_location=None, + strain=None, + config=None, + server=None, + ): + super().__init__(name="compute_stress_XZ", config=config, server=server) + self._inputs = InputsComputeStressXz(self) + self._outputs = OutputsComputeStressXz(self) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if strain is not None: + self.inputs.strain.connect(strain) + + @staticmethod + def _spec(): + description = """Computes the stress from an elastic strain field.Only some 3-D + elements (only hexa, tetra, pyramid and wedge) and + integration schemes are supported. 
Only isotropic + materials are supported. Material nonlinearity is not + supported. Only constant materials are supported. All + coordinates are global coordinates. All units need to be + consistent.Get the XZ shear component (02 component).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Needed to get mesh and material ids. optional + if a data_sources have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Needed to get mesh and material ids. optional + if a streams_container have been + connected.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="strain", + type_names=["fields_container", "field"], + optional=False, + document="""Field/or fields container containing only the + elastic strain field (element nodal).""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. 
When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_stress_XZ", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeStressXz + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeStressXz + """ + return super().outputs + + +class InputsComputeStressXz(_Inputs): + """Intermediate class used to connect user inputs to + compute_stress_XZ operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_XZ() + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_strain = dpf.FieldsContainer() + >>> op.inputs.strain.connect(my_strain) + """ + + def __init__(self, op: Operator): + super().__init__(compute_stress_XZ._spec().inputs, op) + self._scoping = Input(compute_stress_XZ._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_stress_XZ._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input(compute_stress_XZ._spec().input_pin(4), 4, op, -1) + self._inputs.append(self._data_sources) + self._requested_location = Input( + compute_stress_XZ._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._strain = Input(compute_stress_XZ._spec().input_pin(10), 10, op, -1) + self._inputs.append(self._strain) + + @property + def scoping(self): + """Allows to connect scoping 
input to the operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_XZ() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Needed to get mesh and material ids. optional + if a data_sources have been + connected. + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_XZ() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Needed to get mesh and material ids. optional + if a streams_container have been + connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_XZ() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. 
+ + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_XZ() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def strain(self): + """Allows to connect strain input to the operator. + + Field/or fields container containing only the + elastic strain field (element nodal). + + Parameters + ---------- + my_strain : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_XZ() + >>> op.inputs.strain.connect(my_strain) + >>> # or + >>> op.inputs.strain(my_strain) + """ + return self._strain + + +class OutputsComputeStressXz(_Outputs): + """Intermediate class used to get outputs from + compute_stress_XZ operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_XZ() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_stress_XZ._spec().outputs, op) + self._fields_container = Output(compute_stress_XZ._spec().output_pin(0), 0, op) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_XZ() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_stress_Y.py b/ansys/dpf/core/operators/result/compute_stress_Y.py new file mode 100644 index 00000000000..ad5a4d32afd --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_stress_Y.py @@ -0,0 +1,374 @@ +""" +compute_stress_Y +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_stress_Y(Operator): + """Computes the stress from an elastic strain field.Only some 3-D + elements (only hexa, tetra, pyramid and wedge) and integration + schemes are supported. Only isotropic materials are supported. + Material nonlinearity is not supported. Only constant materials + are supported. All coordinates are global coordinates. All units + need to be consistent.Get the YY normal component (11 component). + + Parameters + ---------- + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Needed to get mesh and material ids. optional + if a data_sources have been + connected. + data_sources : DataSources, optional + Needed to get mesh and material ids. optional + if a streams_container have been + connected. + requested_location : str, optional + Average the elemental nodal result to the + requested location. + strain : FieldsContainer or Field + Field/or fields container containing only the + elastic strain field (element nodal). 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_stress_Y() + + >>> # Make input connections + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_strain = dpf.FieldsContainer() + >>> op.inputs.strain.connect(my_strain) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_stress_Y( + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... requested_location=my_requested_location, + ... strain=my_strain, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + scoping=None, + streams_container=None, + data_sources=None, + requested_location=None, + strain=None, + config=None, + server=None, + ): + super().__init__(name="compute_stress_Y", config=config, server=server) + self._inputs = InputsComputeStressY(self) + self._outputs = OutputsComputeStressY(self) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if strain is not None: + self.inputs.strain.connect(strain) + + @staticmethod + def _spec(): + description = """Computes the stress from an elastic strain field.Only some 3-D + elements (only hexa, tetra, pyramid and wedge) and + integration schemes are supported. 
Only isotropic + materials are supported. Material nonlinearity is not + supported. Only constant materials are supported. All + coordinates are global coordinates. All units need to be + consistent.Get the YY normal component (11 component).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Needed to get mesh and material ids. optional + if a data_sources have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Needed to get mesh and material ids. optional + if a streams_container have been + connected.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="strain", + type_names=["fields_container", "field"], + optional=False, + document="""Field/or fields container containing only the + elastic strain field (element nodal).""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. 
When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_stress_Y", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeStressY + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeStressY + """ + return super().outputs + + +class InputsComputeStressY(_Inputs): + """Intermediate class used to connect user inputs to + compute_stress_Y operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_Y() + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_strain = dpf.FieldsContainer() + >>> op.inputs.strain.connect(my_strain) + """ + + def __init__(self, op: Operator): + super().__init__(compute_stress_Y._spec().inputs, op) + self._scoping = Input(compute_stress_Y._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_stress_Y._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input(compute_stress_Y._spec().input_pin(4), 4, op, -1) + self._inputs.append(self._data_sources) + self._requested_location = Input( + compute_stress_Y._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._strain = Input(compute_stress_Y._spec().input_pin(10), 10, op, -1) + self._inputs.append(self._strain) + + @property + def scoping(self): + """Allows to connect scoping input to the 
operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_Y() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Needed to get mesh and material ids. optional + if a data_sources have been + connected. + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_Y() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Needed to get mesh and material ids. optional + if a streams_container have been + connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_Y() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. 
+ + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_Y() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def strain(self): + """Allows to connect strain input to the operator. + + Field/or fields container containing only the + elastic strain field (element nodal). + + Parameters + ---------- + my_strain : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_Y() + >>> op.inputs.strain.connect(my_strain) + >>> # or + >>> op.inputs.strain(my_strain) + """ + return self._strain + + +class OutputsComputeStressY(_Outputs): + """Intermediate class used to get outputs from + compute_stress_Y operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_Y() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_stress_Y._spec().outputs, op) + self._fields_container = Output(compute_stress_Y._spec().output_pin(0), 0, op) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_Y() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_stress_YZ.py b/ansys/dpf/core/operators/result/compute_stress_YZ.py new file mode 100644 index 00000000000..562e6d208e6 --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_stress_YZ.py @@ -0,0 +1,374 @@ +""" +compute_stress_YZ +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_stress_YZ(Operator): + """Computes the stress from an elastic strain field.Only some 3-D + elements (only hexa, tetra, pyramid and wedge) and integration + schemes are supported. Only isotropic materials are supported. + Material nonlinearity is not supported. Only constant materials + are supported. All coordinates are global coordinates. All units + need to be consistent.Get the YZ shear component (12 component). + + Parameters + ---------- + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Needed to get mesh and material ids. optional + if a data_sources have been + connected. + data_sources : DataSources, optional + Needed to get mesh and material ids. optional + if a streams_container have been + connected. + requested_location : str, optional + Average the elemental nodal result to the + requested location. + strain : FieldsContainer or Field + Field/or fields container containing only the + elastic strain field (element nodal). 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_stress_YZ() + + >>> # Make input connections + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_strain = dpf.FieldsContainer() + >>> op.inputs.strain.connect(my_strain) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_stress_YZ( + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... requested_location=my_requested_location, + ... strain=my_strain, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + scoping=None, + streams_container=None, + data_sources=None, + requested_location=None, + strain=None, + config=None, + server=None, + ): + super().__init__(name="compute_stress_YZ", config=config, server=server) + self._inputs = InputsComputeStressYz(self) + self._outputs = OutputsComputeStressYz(self) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if strain is not None: + self.inputs.strain.connect(strain) + + @staticmethod + def _spec(): + description = """Computes the stress from an elastic strain field.Only some 3-D + elements (only hexa, tetra, pyramid and wedge) and + integration schemes are supported. 
Only isotropic + materials are supported. Material nonlinearity is not + supported. Only constant materials are supported. All + coordinates are global coordinates. All units need to be + consistent.Get the YZ shear component (12 component).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Needed to get mesh and material ids. optional + if a data_sources have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Needed to get mesh and material ids. optional + if a streams_container have been + connected.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="strain", + type_names=["fields_container", "field"], + optional=False, + document="""Field/or fields container containing only the + elastic strain field (element nodal).""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. 
When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_stress_YZ", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeStressYz + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeStressYz + """ + return super().outputs + + +class InputsComputeStressYz(_Inputs): + """Intermediate class used to connect user inputs to + compute_stress_YZ operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_YZ() + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_strain = dpf.FieldsContainer() + >>> op.inputs.strain.connect(my_strain) + """ + + def __init__(self, op: Operator): + super().__init__(compute_stress_YZ._spec().inputs, op) + self._scoping = Input(compute_stress_YZ._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_stress_YZ._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input(compute_stress_YZ._spec().input_pin(4), 4, op, -1) + self._inputs.append(self._data_sources) + self._requested_location = Input( + compute_stress_YZ._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._strain = Input(compute_stress_YZ._spec().input_pin(10), 10, op, -1) + self._inputs.append(self._strain) + + @property + def scoping(self): + """Allows to connect scoping 
input to the operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_YZ() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Needed to get mesh and material ids. optional + if a data_sources have been + connected. + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_YZ() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Needed to get mesh and material ids. optional + if a streams_container have been + connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_YZ() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. 
+ + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_YZ() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def strain(self): + """Allows to connect strain input to the operator. + + Field/or fields container containing only the + elastic strain field (element nodal). + + Parameters + ---------- + my_strain : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_YZ() + >>> op.inputs.strain.connect(my_strain) + >>> # or + >>> op.inputs.strain(my_strain) + """ + return self._strain + + +class OutputsComputeStressYz(_Outputs): + """Intermediate class used to get outputs from + compute_stress_YZ operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_YZ() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_stress_YZ._spec().outputs, op) + self._fields_container = Output(compute_stress_YZ._spec().output_pin(0), 0, op) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_YZ() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_stress_Z.py b/ansys/dpf/core/operators/result/compute_stress_Z.py new file mode 100644 index 00000000000..67d3032a491 --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_stress_Z.py @@ -0,0 +1,374 @@ +""" +compute_stress_Z +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_stress_Z(Operator): + """Computes the stress from an elastic strain field.Only some 3-D + elements (only hexa, tetra, pyramid and wedge) and integration + schemes are supported. Only isotropic materials are supported. + Material nonlinearity is not supported. Only constant materials + are supported. All coordinates are global coordinates. All units + need to be consistent.Get the ZZ normal component (22 component). + + Parameters + ---------- + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Needed to get mesh and material ids. optional + if a data_sources have been + connected. + data_sources : DataSources, optional + Needed to get mesh and material ids. optional + if a streams_container have been + connected. + requested_location : str, optional + Average the elemental nodal result to the + requested location. + strain : FieldsContainer or Field + Field/or fields container containing only the + elastic strain field (element nodal). 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_stress_Z() + + >>> # Make input connections + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_strain = dpf.FieldsContainer() + >>> op.inputs.strain.connect(my_strain) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_stress_Z( + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... requested_location=my_requested_location, + ... strain=my_strain, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + scoping=None, + streams_container=None, + data_sources=None, + requested_location=None, + strain=None, + config=None, + server=None, + ): + super().__init__(name="compute_stress_Z", config=config, server=server) + self._inputs = InputsComputeStressZ(self) + self._outputs = OutputsComputeStressZ(self) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if strain is not None: + self.inputs.strain.connect(strain) + + @staticmethod + def _spec(): + description = """Computes the stress from an elastic strain field.Only some 3-D + elements (only hexa, tetra, pyramid and wedge) and + integration schemes are supported. 
Only isotropic + materials are supported. Material nonlinearity is not + supported. Only constant materials are supported. All + coordinates are global coordinates. All units need to be + consistent.Get the ZZ normal component (22 component).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Needed to get mesh and material ids. optional + if a data_sources have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Needed to get mesh and material ids. optional + if a streams_container have been + connected.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="strain", + type_names=["fields_container", "field"], + optional=False, + document="""Field/or fields container containing only the + elastic strain field (element nodal).""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. 
When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_stress_Z", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeStressZ + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeStressZ + """ + return super().outputs + + +class InputsComputeStressZ(_Inputs): + """Intermediate class used to connect user inputs to + compute_stress_Z operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_Z() + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_strain = dpf.FieldsContainer() + >>> op.inputs.strain.connect(my_strain) + """ + + def __init__(self, op: Operator): + super().__init__(compute_stress_Z._spec().inputs, op) + self._scoping = Input(compute_stress_Z._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_stress_Z._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input(compute_stress_Z._spec().input_pin(4), 4, op, -1) + self._inputs.append(self._data_sources) + self._requested_location = Input( + compute_stress_Z._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._strain = Input(compute_stress_Z._spec().input_pin(10), 10, op, -1) + self._inputs.append(self._strain) + + @property + def scoping(self): + """Allows to connect scoping input to the 
operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_Z() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Needed to get mesh and material ids. optional + if a data_sources have been + connected. + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_Z() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Needed to get mesh and material ids. optional + if a streams_container have been + connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_Z() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. 
+ + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_Z() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def strain(self): + """Allows to connect strain input to the operator. + + Field/or fields container containing only the + elastic strain field (element nodal). + + Parameters + ---------- + my_strain : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_Z() + >>> op.inputs.strain.connect(my_strain) + >>> # or + >>> op.inputs.strain(my_strain) + """ + return self._strain + + +class OutputsComputeStressZ(_Outputs): + """Intermediate class used to get outputs from + compute_stress_Z operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_Z() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_stress_Z._spec().outputs, op) + self._fields_container = Output(compute_stress_Z._spec().output_pin(0), 0, op) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_Z() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_stress_von_mises.py b/ansys/dpf/core/operators/result/compute_stress_von_mises.py new file mode 100644 index 00000000000..03554486ebf --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_stress_von_mises.py @@ -0,0 +1,378 @@ +""" +compute_stress_von_mises +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_stress_von_mises(Operator): + """Computes the stress from an elastic strain field.Only some 3-D + elements (only hexa, tetra, pyramid and wedge) and integration + schemes are supported. Only isotropic materials are supported. + Material nonlinearity is not supported. Only constant materials + are supported. All coordinates are global coordinates. All units + need to be consistent.Get the Von Mises equivalent stress. + + Parameters + ---------- + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Needed to get mesh and material ids. optional + if a data_sources have been + connected. + data_sources : DataSources, optional + Needed to get mesh and material ids. optional + if a streams_container have been + connected. + requested_location : str, optional + Average the elemental nodal result to the + requested location. + strain : FieldsContainer or Field + Field/or fields container containing only the + elastic strain field (element nodal). 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_stress_von_mises() + + >>> # Make input connections + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_strain = dpf.FieldsContainer() + >>> op.inputs.strain.connect(my_strain) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_stress_von_mises( + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... requested_location=my_requested_location, + ... strain=my_strain, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + scoping=None, + streams_container=None, + data_sources=None, + requested_location=None, + strain=None, + config=None, + server=None, + ): + super().__init__(name="compute_stress_von_mises", config=config, server=server) + self._inputs = InputsComputeStressVonMises(self) + self._outputs = OutputsComputeStressVonMises(self) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if strain is not None: + self.inputs.strain.connect(strain) + + @staticmethod + def _spec(): + description = """Computes the stress from an elastic strain field.Only some 3-D + elements (only hexa, tetra, pyramid and wedge) and + integration 
schemes are supported. Only isotropic + materials are supported. Material nonlinearity is not + supported. Only constant materials are supported. All + coordinates are global coordinates. All units need to be + consistent.Get the Von Mises equivalent stress.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Needed to get mesh and material ids. optional + if a data_sources have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Needed to get mesh and material ids. optional + if a streams_container have been + connected.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="strain", + type_names=["fields_container", "field"], + optional=False, + document="""Field/or fields container containing only the + elastic strain field (element nodal).""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. 
When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_stress_von_mises", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeStressVonMises + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeStressVonMises + """ + return super().outputs + + +class InputsComputeStressVonMises(_Inputs): + """Intermediate class used to connect user inputs to + compute_stress_von_mises operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_von_mises() + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_strain = dpf.FieldsContainer() + >>> op.inputs.strain.connect(my_strain) + """ + + def __init__(self, op: Operator): + super().__init__(compute_stress_von_mises._spec().inputs, op) + self._scoping = Input(compute_stress_von_mises._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_stress_von_mises._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + compute_stress_von_mises._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._requested_location = Input( + compute_stress_von_mises._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._strain = Input(compute_stress_von_mises._spec().input_pin(10), 10, op, -1) + 
self._inputs.append(self._strain) + + @property + def scoping(self): + """Allows to connect scoping input to the operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_von_mises() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Needed to get mesh and material ids. optional + if a data_sources have been + connected. + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_von_mises() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Needed to get mesh and material ids. optional + if a streams_container have been + connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_von_mises() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. 
+ + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_von_mises() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def strain(self): + """Allows to connect strain input to the operator. + + Field/or fields container containing only the + elastic strain field (element nodal). + + Parameters + ---------- + my_strain : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_von_mises() + >>> op.inputs.strain.connect(my_strain) + >>> # or + >>> op.inputs.strain(my_strain) + """ + return self._strain + + +class OutputsComputeStressVonMises(_Outputs): + """Intermediate class used to get outputs from + compute_stress_von_mises operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_von_mises() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_stress_von_mises._spec().outputs, op) + self._fields_container = Output( + compute_stress_von_mises._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_stress_von_mises() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_total_strain.py b/ansys/dpf/core/operators/result/compute_total_strain.py new file mode 100644 index 00000000000..9a56a2e3d6a --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_total_strain.py @@ -0,0 +1,589 @@ +""" +compute_total_strain +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_total_strain(Operator): + """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, pyramid + and wedge). Layered elements are not supported. All coordinates + are global coordinates. Not all strain formulations are supported. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. + data_sources : DataSources, optional + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + extrapolate : int, optional + Whether to extrapolate the data from the + integration points to the nodes. 
+ nonlinear : int, optional + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). + abstract_meshed_region : MeshedRegion, optional + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. + requested_location : str, optional + Average the elemental nodal result to the + requested location. + displacement : FieldsContainer or Field, optional + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_total_strain() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_total_strain( + ... time_scoping=my_time_scoping, + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... 
extrapolate=my_extrapolate, + ... nonlinear=my_nonlinear, + ... abstract_meshed_region=my_abstract_meshed_region, + ... requested_location=my_requested_location, + ... displacement=my_displacement, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + scoping=None, + streams_container=None, + data_sources=None, + extrapolate=None, + nonlinear=None, + abstract_meshed_region=None, + requested_location=None, + displacement=None, + config=None, + server=None, + ): + super().__init__(name="compute_total_strain", config=config, server=server) + self._inputs = InputsComputeTotalStrain(self) + self._outputs = OutputsComputeTotalStrain(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if extrapolate is not None: + self.inputs.extrapolate.connect(extrapolate) + if nonlinear is not None: + self.inputs.nonlinear.connect(nonlinear) + if abstract_meshed_region is not None: + self.inputs.abstract_meshed_region.connect(abstract_meshed_region) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if displacement is not None: + self.inputs.displacement.connect(displacement) + + @staticmethod + def _spec(): + description = """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, + pyramid and wedge). Layered elements are not supported. + All coordinates are global coordinates. 
Not all strain + formulations are supported.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator).""", + ), + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected.""", + ), + 5: PinSpecification( + name="extrapolate", + type_names=["int32"], + optional=True, + document="""Whether to extrapolate the data from the + integration points to the nodes.""", + ), + 6: PinSpecification( + name="nonlinear", + type_names=["int32"], + optional=True, + document="""Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity).""", + ), + 7: PinSpecification( + name="abstract_meshed_region", + type_names=["abstract_meshed_region"], + optional=True, + document="""The underlying mesh. 
optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="displacement", + type_names=["fields_container", "field"], + optional=True, + document="""Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_total_strain", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeTotalStrain + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeTotalStrain + """ + return super().outputs + + +class InputsComputeTotalStrain(_Inputs): + """Intermediate class used to connect user inputs to + compute_total_strain operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + """ + + def __init__(self, op: Operator): + super().__init__(compute_total_strain._spec().inputs, op) + self._time_scoping = Input(compute_total_strain._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._time_scoping) + self._scoping = Input(compute_total_strain._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_total_strain._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input(compute_total_strain._spec().input_pin(4), 4, op, -1) + self._inputs.append(self._data_sources) + self._extrapolate = Input(compute_total_strain._spec().input_pin(5), 5, op, -1) + self._inputs.append(self._extrapolate) + self._nonlinear = Input(compute_total_strain._spec().input_pin(6), 6, op, -1) + self._inputs.append(self._nonlinear) + self._abstract_meshed_region = Input( + compute_total_strain._spec().input_pin(7), 7, op, -1 + ) + self._inputs.append(self._abstract_meshed_region) + 
self._requested_location = Input( + compute_total_strain._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._displacement = Input( + compute_total_strain._spec().input_pin(10), 10, op, -1 + ) + self._inputs.append(self._displacement) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def scoping(self): + """Allows to connect scoping input to the operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. 
+ + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def extrapolate(self): + """Allows to connect extrapolate input to the operator. + + Whether to extrapolate the data from the + integration points to the nodes. + + Parameters + ---------- + my_extrapolate : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> # or + >>> op.inputs.extrapolate(my_extrapolate) + """ + return self._extrapolate + + @property + def nonlinear(self): + """Allows to connect nonlinear input to the operator. + + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). 
+ + Parameters + ---------- + my_nonlinear : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> # or + >>> op.inputs.nonlinear(my_nonlinear) + """ + return self._nonlinear + + @property + def abstract_meshed_region(self): + """Allows to connect abstract_meshed_region input to the operator. + + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. + + Parameters + ---------- + my_abstract_meshed_region : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> # or + >>> op.inputs.abstract_meshed_region(my_abstract_meshed_region) + """ + return self._abstract_meshed_region + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def displacement(self): + """Allows to connect displacement input to the operator. + + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. 
+ + Parameters + ---------- + my_displacement : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain() + >>> op.inputs.displacement.connect(my_displacement) + >>> # or + >>> op.inputs.displacement(my_displacement) + """ + return self._displacement + + +class OutputsComputeTotalStrain(_Outputs): + """Intermediate class used to get outputs from + compute_total_strain operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_total_strain._spec().outputs, op) + self._fields_container = Output( + compute_total_strain._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_total_strain_1.py b/ansys/dpf/core/operators/result/compute_total_strain_1.py new file mode 100644 index 00000000000..e55adabff02 --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_total_strain_1.py @@ -0,0 +1,597 @@ +""" +compute_total_strain_1 +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_total_strain_1(Operator): + """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, pyramid + and wedge). Layered elements are not supported. All coordinates + are global coordinates. Not all strain formulations are supported. + Get the 1st principal component. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. + data_sources : DataSources, optional + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + extrapolate : int, optional + Whether to extrapolate the data from the + integration points to the nodes. + nonlinear : int, optional + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). + abstract_meshed_region : MeshedRegion, optional + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. 
+ requested_location : str, optional + Average the elemental nodal result to the + requested location. + displacement : FieldsContainer or Field, optional + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_total_strain_1() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_total_strain_1( + ... time_scoping=my_time_scoping, + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... extrapolate=my_extrapolate, + ... nonlinear=my_nonlinear, + ... abstract_meshed_region=my_abstract_meshed_region, + ... requested_location=my_requested_location, + ... displacement=my_displacement, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + scoping=None, + streams_container=None, + data_sources=None, + extrapolate=None, + nonlinear=None, + abstract_meshed_region=None, + requested_location=None, + displacement=None, + config=None, + server=None, + ): + super().__init__(name="compute_total_strain_1", config=config, server=server) + self._inputs = InputsComputeTotalStrain1(self) + self._outputs = OutputsComputeTotalStrain1(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if extrapolate is not None: + self.inputs.extrapolate.connect(extrapolate) + if nonlinear is not None: + self.inputs.nonlinear.connect(nonlinear) + if abstract_meshed_region is not None: + self.inputs.abstract_meshed_region.connect(abstract_meshed_region) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if displacement is not None: + self.inputs.displacement.connect(displacement) + + @staticmethod + def _spec(): + description = """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, + pyramid and wedge). Layered elements are not supported. + All coordinates are global coordinates. Not all strain + formulations are supported. 
Get the 1st principal + component.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator).""", + ), + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected.""", + ), + 5: PinSpecification( + name="extrapolate", + type_names=["int32"], + optional=True, + document="""Whether to extrapolate the data from the + integration points to the nodes.""", + ), + 6: PinSpecification( + name="nonlinear", + type_names=["int32"], + optional=True, + document="""Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity).""", + ), + 7: PinSpecification( + name="abstract_meshed_region", + type_names=["abstract_meshed_region"], + optional=True, + document="""The underlying mesh. 
optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="displacement", + type_names=["fields_container", "field"], + optional=True, + document="""Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_total_strain_1", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeTotalStrain1 + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeTotalStrain1 + """ + return super().outputs + + +class InputsComputeTotalStrain1(_Inputs): + """Intermediate class used to connect user inputs to + compute_total_strain_1 operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_1() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + """ + + def __init__(self, op: Operator): + super().__init__(compute_total_strain_1._spec().inputs, op) + self._time_scoping = Input( + compute_total_strain_1._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._scoping = Input(compute_total_strain_1._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_total_strain_1._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + compute_total_strain_1._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._extrapolate = Input( + compute_total_strain_1._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._extrapolate) + self._nonlinear = Input(compute_total_strain_1._spec().input_pin(6), 6, op, -1) + self._inputs.append(self._nonlinear) + self._abstract_meshed_region = Input( + compute_total_strain_1._spec().input_pin(7), 7, op, -1 + ) + 
self._inputs.append(self._abstract_meshed_region) + self._requested_location = Input( + compute_total_strain_1._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._displacement = Input( + compute_total_strain_1._spec().input_pin(10), 10, op, -1 + ) + self._inputs.append(self._displacement) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_1() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def scoping(self): + """Allows to connect scoping input to the operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_1() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. 
+ + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_1() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_1() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def extrapolate(self): + """Allows to connect extrapolate input to the operator. + + Whether to extrapolate the data from the + integration points to the nodes. + + Parameters + ---------- + my_extrapolate : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_1() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> # or + >>> op.inputs.extrapolate(my_extrapolate) + """ + return self._extrapolate + + @property + def nonlinear(self): + """Allows to connect nonlinear input to the operator. + + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). 
+ + Parameters + ---------- + my_nonlinear : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_1() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> # or + >>> op.inputs.nonlinear(my_nonlinear) + """ + return self._nonlinear + + @property + def abstract_meshed_region(self): + """Allows to connect abstract_meshed_region input to the operator. + + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. + + Parameters + ---------- + my_abstract_meshed_region : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_1() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> # or + >>> op.inputs.abstract_meshed_region(my_abstract_meshed_region) + """ + return self._abstract_meshed_region + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_1() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def displacement(self): + """Allows to connect displacement input to the operator. + + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. 
+ + Parameters + ---------- + my_displacement : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_1() + >>> op.inputs.displacement.connect(my_displacement) + >>> # or + >>> op.inputs.displacement(my_displacement) + """ + return self._displacement + + +class OutputsComputeTotalStrain1(_Outputs): + """Intermediate class used to get outputs from + compute_total_strain_1 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_1() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_total_strain_1._spec().outputs, op) + self._fields_container = Output( + compute_total_strain_1._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_1() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_total_strain_2.py b/ansys/dpf/core/operators/result/compute_total_strain_2.py new file mode 100644 index 00000000000..c28822c89dc --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_total_strain_2.py @@ -0,0 +1,597 @@ +""" +compute_total_strain_2 +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_total_strain_2(Operator): + """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, pyramid + and wedge). Layered elements are not supported. All coordinates + are global coordinates. Not all strain formulations are supported. + Get the 2nd principal component. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. + data_sources : DataSources, optional + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + extrapolate : int, optional + Whether to extrapolate the data from the + integration points to the nodes. + nonlinear : int, optional + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). + abstract_meshed_region : MeshedRegion, optional + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. 
+ requested_location : str, optional + Average the elemental nodal result to the + requested location. + displacement : FieldsContainer or Field, optional + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_total_strain_2() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_total_strain_2( + ... time_scoping=my_time_scoping, + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... extrapolate=my_extrapolate, + ... nonlinear=my_nonlinear, + ... abstract_meshed_region=my_abstract_meshed_region, + ... requested_location=my_requested_location, + ... displacement=my_displacement, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + scoping=None, + streams_container=None, + data_sources=None, + extrapolate=None, + nonlinear=None, + abstract_meshed_region=None, + requested_location=None, + displacement=None, + config=None, + server=None, + ): + super().__init__(name="compute_total_strain_2", config=config, server=server) + self._inputs = InputsComputeTotalStrain2(self) + self._outputs = OutputsComputeTotalStrain2(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if extrapolate is not None: + self.inputs.extrapolate.connect(extrapolate) + if nonlinear is not None: + self.inputs.nonlinear.connect(nonlinear) + if abstract_meshed_region is not None: + self.inputs.abstract_meshed_region.connect(abstract_meshed_region) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if displacement is not None: + self.inputs.displacement.connect(displacement) + + @staticmethod + def _spec(): + description = """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, + pyramid and wedge). Layered elements are not supported. + All coordinates are global coordinates. Not all strain + formulations are supported. 
Get the 2nd principal + component.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator).""", + ), + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected.""", + ), + 5: PinSpecification( + name="extrapolate", + type_names=["int32"], + optional=True, + document="""Whether to extrapolate the data from the + integration points to the nodes.""", + ), + 6: PinSpecification( + name="nonlinear", + type_names=["int32"], + optional=True, + document="""Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity).""", + ), + 7: PinSpecification( + name="abstract_meshed_region", + type_names=["abstract_meshed_region"], + optional=True, + document="""The underlying mesh. 
optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="displacement", + type_names=["fields_container", "field"], + optional=True, + document="""Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_total_strain_2", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeTotalStrain2 + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeTotalStrain2 + """ + return super().outputs + + +class InputsComputeTotalStrain2(_Inputs): + """Intermediate class used to connect user inputs to + compute_total_strain_2 operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_2() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + """ + + def __init__(self, op: Operator): + super().__init__(compute_total_strain_2._spec().inputs, op) + self._time_scoping = Input( + compute_total_strain_2._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._scoping = Input(compute_total_strain_2._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_total_strain_2._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + compute_total_strain_2._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._extrapolate = Input( + compute_total_strain_2._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._extrapolate) + self._nonlinear = Input(compute_total_strain_2._spec().input_pin(6), 6, op, -1) + self._inputs.append(self._nonlinear) + self._abstract_meshed_region = Input( + compute_total_strain_2._spec().input_pin(7), 7, op, -1 + ) + 
self._inputs.append(self._abstract_meshed_region) + self._requested_location = Input( + compute_total_strain_2._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._displacement = Input( + compute_total_strain_2._spec().input_pin(10), 10, op, -1 + ) + self._inputs.append(self._displacement) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_2() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def scoping(self): + """Allows to connect scoping input to the operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_2() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. 
+ + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_2() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_2() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def extrapolate(self): + """Allows to connect extrapolate input to the operator. + + Whether to extrapolate the data from the + integration points to the nodes. + + Parameters + ---------- + my_extrapolate : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_2() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> # or + >>> op.inputs.extrapolate(my_extrapolate) + """ + return self._extrapolate + + @property + def nonlinear(self): + """Allows to connect nonlinear input to the operator. + + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). 
+ + Parameters + ---------- + my_nonlinear : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_2() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> # or + >>> op.inputs.nonlinear(my_nonlinear) + """ + return self._nonlinear + + @property + def abstract_meshed_region(self): + """Allows to connect abstract_meshed_region input to the operator. + + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. + + Parameters + ---------- + my_abstract_meshed_region : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_2() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> # or + >>> op.inputs.abstract_meshed_region(my_abstract_meshed_region) + """ + return self._abstract_meshed_region + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_2() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def displacement(self): + """Allows to connect displacement input to the operator. + + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. 
+ + Parameters + ---------- + my_displacement : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_2() + >>> op.inputs.displacement.connect(my_displacement) + >>> # or + >>> op.inputs.displacement(my_displacement) + """ + return self._displacement + + +class OutputsComputeTotalStrain2(_Outputs): + """Intermediate class used to get outputs from + compute_total_strain_2 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_2() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_total_strain_2._spec().outputs, op) + self._fields_container = Output( + compute_total_strain_2._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_2() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_total_strain_3.py b/ansys/dpf/core/operators/result/compute_total_strain_3.py new file mode 100644 index 00000000000..4606f15b4d2 --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_total_strain_3.py @@ -0,0 +1,597 @@ +""" +compute_total_strain_3 +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_total_strain_3(Operator): + """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, pyramid + and wedge). Layered elements are not supported. All coordinates + are global coordinates. Not all strain formulations are supported. + Get the 3rd principal component. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. + data_sources : DataSources, optional + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + extrapolate : int, optional + Whether to extrapolate the data from the + integration points to the nodes. + nonlinear : int, optional + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). + abstract_meshed_region : MeshedRegion, optional + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. 
+ requested_location : str, optional + Average the elemental nodal result to the + requested location. + displacement : FieldsContainer or Field, optional + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_total_strain_3() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_total_strain_3( + ... time_scoping=my_time_scoping, + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... extrapolate=my_extrapolate, + ... nonlinear=my_nonlinear, + ... abstract_meshed_region=my_abstract_meshed_region, + ... requested_location=my_requested_location, + ... displacement=my_displacement, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + scoping=None, + streams_container=None, + data_sources=None, + extrapolate=None, + nonlinear=None, + abstract_meshed_region=None, + requested_location=None, + displacement=None, + config=None, + server=None, + ): + super().__init__(name="compute_total_strain_3", config=config, server=server) + self._inputs = InputsComputeTotalStrain3(self) + self._outputs = OutputsComputeTotalStrain3(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if extrapolate is not None: + self.inputs.extrapolate.connect(extrapolate) + if nonlinear is not None: + self.inputs.nonlinear.connect(nonlinear) + if abstract_meshed_region is not None: + self.inputs.abstract_meshed_region.connect(abstract_meshed_region) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if displacement is not None: + self.inputs.displacement.connect(displacement) + + @staticmethod + def _spec(): + description = """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, + pyramid and wedge). Layered elements are not supported. + All coordinates are global coordinates. Not all strain + formulations are supported. 
Get the 3rd principal + component.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator).""", + ), + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected.""", + ), + 5: PinSpecification( + name="extrapolate", + type_names=["int32"], + optional=True, + document="""Whether to extrapolate the data from the + integration points to the nodes.""", + ), + 6: PinSpecification( + name="nonlinear", + type_names=["int32"], + optional=True, + document="""Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity).""", + ), + 7: PinSpecification( + name="abstract_meshed_region", + type_names=["abstract_meshed_region"], + optional=True, + document="""The underlying mesh. 
optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="displacement", + type_names=["fields_container", "field"], + optional=True, + document="""Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_total_strain_3", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeTotalStrain3 + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeTotalStrain3 + """ + return super().outputs + + +class InputsComputeTotalStrain3(_Inputs): + """Intermediate class used to connect user inputs to + compute_total_strain_3 operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_3() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + """ + + def __init__(self, op: Operator): + super().__init__(compute_total_strain_3._spec().inputs, op) + self._time_scoping = Input( + compute_total_strain_3._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._scoping = Input(compute_total_strain_3._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_total_strain_3._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + compute_total_strain_3._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._extrapolate = Input( + compute_total_strain_3._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._extrapolate) + self._nonlinear = Input(compute_total_strain_3._spec().input_pin(6), 6, op, -1) + self._inputs.append(self._nonlinear) + self._abstract_meshed_region = Input( + compute_total_strain_3._spec().input_pin(7), 7, op, -1 + ) + 
self._inputs.append(self._abstract_meshed_region) + self._requested_location = Input( + compute_total_strain_3._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._displacement = Input( + compute_total_strain_3._spec().input_pin(10), 10, op, -1 + ) + self._inputs.append(self._displacement) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_3() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def scoping(self): + """Allows to connect scoping input to the operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_3() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. 
+ + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_3() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_3() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def extrapolate(self): + """Allows to connect extrapolate input to the operator. + + Whether to extrapolate the data from the + integration points to the nodes. + + Parameters + ---------- + my_extrapolate : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_3() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> # or + >>> op.inputs.extrapolate(my_extrapolate) + """ + return self._extrapolate + + @property + def nonlinear(self): + """Allows to connect nonlinear input to the operator. + + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). 
+ + Parameters + ---------- + my_nonlinear : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_3() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> # or + >>> op.inputs.nonlinear(my_nonlinear) + """ + return self._nonlinear + + @property + def abstract_meshed_region(self): + """Allows to connect abstract_meshed_region input to the operator. + + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. + + Parameters + ---------- + my_abstract_meshed_region : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_3() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> # or + >>> op.inputs.abstract_meshed_region(my_abstract_meshed_region) + """ + return self._abstract_meshed_region + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_3() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def displacement(self): + """Allows to connect displacement input to the operator. + + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. 
+ + Parameters + ---------- + my_displacement : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_3() + >>> op.inputs.displacement.connect(my_displacement) + >>> # or + >>> op.inputs.displacement(my_displacement) + """ + return self._displacement + + +class OutputsComputeTotalStrain3(_Outputs): + """Intermediate class used to get outputs from + compute_total_strain_3 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_3() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_total_strain_3._spec().outputs, op) + self._fields_container = Output( + compute_total_strain_3._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_3() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_total_strain_X.py b/ansys/dpf/core/operators/result/compute_total_strain_X.py new file mode 100644 index 00000000000..159d44d7c62 --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_total_strain_X.py @@ -0,0 +1,597 @@ +""" +compute_total_strain_X +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_total_strain_X(Operator): + """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, pyramid + and wedge). Layered elements are not supported. All coordinates + are global coordinates. Not all strain formulations are supported. + Get the XX normal component (00 component). + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. + data_sources : DataSources, optional + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + extrapolate : int, optional + Whether to extrapolate the data from the + integration points to the nodes. + nonlinear : int, optional + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). + abstract_meshed_region : MeshedRegion, optional + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. 
+ requested_location : str, optional + Average the elemental nodal result to the + requested location. + displacement : FieldsContainer or Field, optional + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_total_strain_X() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_total_strain_X( + ... time_scoping=my_time_scoping, + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... extrapolate=my_extrapolate, + ... nonlinear=my_nonlinear, + ... abstract_meshed_region=my_abstract_meshed_region, + ... requested_location=my_requested_location, + ... displacement=my_displacement, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + scoping=None, + streams_container=None, + data_sources=None, + extrapolate=None, + nonlinear=None, + abstract_meshed_region=None, + requested_location=None, + displacement=None, + config=None, + server=None, + ): + super().__init__(name="compute_total_strain_X", config=config, server=server) + self._inputs = InputsComputeTotalStrainX(self) + self._outputs = OutputsComputeTotalStrainX(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if extrapolate is not None: + self.inputs.extrapolate.connect(extrapolate) + if nonlinear is not None: + self.inputs.nonlinear.connect(nonlinear) + if abstract_meshed_region is not None: + self.inputs.abstract_meshed_region.connect(abstract_meshed_region) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if displacement is not None: + self.inputs.displacement.connect(displacement) + + @staticmethod + def _spec(): + description = """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, + pyramid and wedge). Layered elements are not supported. + All coordinates are global coordinates. Not all strain + formulations are supported. 
Get the XX normal component + (00 component).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator).""", + ), + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected.""", + ), + 5: PinSpecification( + name="extrapolate", + type_names=["int32"], + optional=True, + document="""Whether to extrapolate the data from the + integration points to the nodes.""", + ), + 6: PinSpecification( + name="nonlinear", + type_names=["int32"], + optional=True, + document="""Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity).""", + ), + 7: PinSpecification( + name="abstract_meshed_region", + type_names=["abstract_meshed_region"], + optional=True, + document="""The underlying mesh. 
optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="displacement", + type_names=["fields_container", "field"], + optional=True, + document="""Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_total_strain_X", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeTotalStrainX + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeTotalStrainX + """ + return super().outputs + + +class InputsComputeTotalStrainX(_Inputs): + """Intermediate class used to connect user inputs to + compute_total_strain_X operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_X() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + """ + + def __init__(self, op: Operator): + super().__init__(compute_total_strain_X._spec().inputs, op) + self._time_scoping = Input( + compute_total_strain_X._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._scoping = Input(compute_total_strain_X._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_total_strain_X._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + compute_total_strain_X._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._extrapolate = Input( + compute_total_strain_X._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._extrapolate) + self._nonlinear = Input(compute_total_strain_X._spec().input_pin(6), 6, op, -1) + self._inputs.append(self._nonlinear) + self._abstract_meshed_region = Input( + compute_total_strain_X._spec().input_pin(7), 7, op, -1 + ) + 
self._inputs.append(self._abstract_meshed_region) + self._requested_location = Input( + compute_total_strain_X._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._displacement = Input( + compute_total_strain_X._spec().input_pin(10), 10, op, -1 + ) + self._inputs.append(self._displacement) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_X() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def scoping(self): + """Allows to connect scoping input to the operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_X() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. 
+ + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_X() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_X() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def extrapolate(self): + """Allows to connect extrapolate input to the operator. + + Whether to extrapolate the data from the + integration points to the nodes. + + Parameters + ---------- + my_extrapolate : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_X() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> # or + >>> op.inputs.extrapolate(my_extrapolate) + """ + return self._extrapolate + + @property + def nonlinear(self): + """Allows to connect nonlinear input to the operator. + + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). 
+ + Parameters + ---------- + my_nonlinear : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_X() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> # or + >>> op.inputs.nonlinear(my_nonlinear) + """ + return self._nonlinear + + @property + def abstract_meshed_region(self): + """Allows to connect abstract_meshed_region input to the operator. + + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. + + Parameters + ---------- + my_abstract_meshed_region : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_X() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> # or + >>> op.inputs.abstract_meshed_region(my_abstract_meshed_region) + """ + return self._abstract_meshed_region + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_X() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def displacement(self): + """Allows to connect displacement input to the operator. + + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. 
+ + Parameters + ---------- + my_displacement : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_X() + >>> op.inputs.displacement.connect(my_displacement) + >>> # or + >>> op.inputs.displacement(my_displacement) + """ + return self._displacement + + +class OutputsComputeTotalStrainX(_Outputs): + """Intermediate class used to get outputs from + compute_total_strain_X operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_X() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_total_strain_X._spec().outputs, op) + self._fields_container = Output( + compute_total_strain_X._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_X() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_total_strain_XY.py b/ansys/dpf/core/operators/result/compute_total_strain_XY.py new file mode 100644 index 00000000000..981c32e6faa --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_total_strain_XY.py @@ -0,0 +1,597 @@ +""" +compute_total_strain_XY +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_total_strain_XY(Operator): + """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, pyramid + and wedge). Layered elements are not supported. All coordinates + are global coordinates. Not all strain formulations are supported. + Get the XY shear component (01 component). + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. + data_sources : DataSources, optional + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + extrapolate : int, optional + Whether to extrapolate the data from the + integration points to the nodes. + nonlinear : int, optional + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). + abstract_meshed_region : MeshedRegion, optional + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. 
+ requested_location : str, optional + Average the elemental nodal result to the + requested location. + displacement : FieldsContainer or Field, optional + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_total_strain_XY() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_total_strain_XY( + ... time_scoping=my_time_scoping, + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... extrapolate=my_extrapolate, + ... nonlinear=my_nonlinear, + ... abstract_meshed_region=my_abstract_meshed_region, + ... requested_location=my_requested_location, + ... displacement=my_displacement, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + scoping=None, + streams_container=None, + data_sources=None, + extrapolate=None, + nonlinear=None, + abstract_meshed_region=None, + requested_location=None, + displacement=None, + config=None, + server=None, + ): + super().__init__(name="compute_total_strain_XY", config=config, server=server) + self._inputs = InputsComputeTotalStrainXy(self) + self._outputs = OutputsComputeTotalStrainXy(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if extrapolate is not None: + self.inputs.extrapolate.connect(extrapolate) + if nonlinear is not None: + self.inputs.nonlinear.connect(nonlinear) + if abstract_meshed_region is not None: + self.inputs.abstract_meshed_region.connect(abstract_meshed_region) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if displacement is not None: + self.inputs.displacement.connect(displacement) + + @staticmethod + def _spec(): + description = """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, + pyramid and wedge). Layered elements are not supported. + All coordinates are global coordinates. Not all strain + formulations are supported. 
Get the XY shear component (01 + component).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator).""", + ), + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected.""", + ), + 5: PinSpecification( + name="extrapolate", + type_names=["int32"], + optional=True, + document="""Whether to extrapolate the data from the + integration points to the nodes.""", + ), + 6: PinSpecification( + name="nonlinear", + type_names=["int32"], + optional=True, + document="""Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity).""", + ), + 7: PinSpecification( + name="abstract_meshed_region", + type_names=["abstract_meshed_region"], + optional=True, + document="""The underlying mesh. 
optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="displacement", + type_names=["fields_container", "field"], + optional=True, + document="""Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_total_strain_XY", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeTotalStrainXy + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeTotalStrainXy + """ + return super().outputs + + +class InputsComputeTotalStrainXy(_Inputs): + """Intermediate class used to connect user inputs to + compute_total_strain_XY operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_XY() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + """ + + def __init__(self, op: Operator): + super().__init__(compute_total_strain_XY._spec().inputs, op) + self._time_scoping = Input( + compute_total_strain_XY._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._scoping = Input(compute_total_strain_XY._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_total_strain_XY._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + compute_total_strain_XY._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._extrapolate = Input( + compute_total_strain_XY._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._extrapolate) + self._nonlinear = Input(compute_total_strain_XY._spec().input_pin(6), 6, op, -1) + self._inputs.append(self._nonlinear) + self._abstract_meshed_region = Input( + compute_total_strain_XY._spec().input_pin(7), 7, op, -1 + ) + 
self._inputs.append(self._abstract_meshed_region) + self._requested_location = Input( + compute_total_strain_XY._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._displacement = Input( + compute_total_strain_XY._spec().input_pin(10), 10, op, -1 + ) + self._inputs.append(self._displacement) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_XY() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def scoping(self): + """Allows to connect scoping input to the operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_XY() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. 
+ + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_XY() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_XY() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def extrapolate(self): + """Allows to connect extrapolate input to the operator. + + Whether to extrapolate the data from the + integration points to the nodes. + + Parameters + ---------- + my_extrapolate : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_XY() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> # or + >>> op.inputs.extrapolate(my_extrapolate) + """ + return self._extrapolate + + @property + def nonlinear(self): + """Allows to connect nonlinear input to the operator. + + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). 
+ + Parameters + ---------- + my_nonlinear : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_XY() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> # or + >>> op.inputs.nonlinear(my_nonlinear) + """ + return self._nonlinear + + @property + def abstract_meshed_region(self): + """Allows to connect abstract_meshed_region input to the operator. + + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. + + Parameters + ---------- + my_abstract_meshed_region : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_XY() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> # or + >>> op.inputs.abstract_meshed_region(my_abstract_meshed_region) + """ + return self._abstract_meshed_region + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_XY() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def displacement(self): + """Allows to connect displacement input to the operator. + + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. 
+ + Parameters + ---------- + my_displacement : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_XY() + >>> op.inputs.displacement.connect(my_displacement) + >>> # or + >>> op.inputs.displacement(my_displacement) + """ + return self._displacement + + +class OutputsComputeTotalStrainXy(_Outputs): + """Intermediate class used to get outputs from + compute_total_strain_XY operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_XY() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_total_strain_XY._spec().outputs, op) + self._fields_container = Output( + compute_total_strain_XY._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_XY() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_total_strain_XZ.py b/ansys/dpf/core/operators/result/compute_total_strain_XZ.py new file mode 100644 index 00000000000..299bcaa19c0 --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_total_strain_XZ.py @@ -0,0 +1,597 @@ +""" +compute_total_strain_XZ +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_total_strain_XZ(Operator): + """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, pyramid + and wedge). Layered elements are not supported. All coordinates + are global coordinates. Not all strain formulations are supported. + Get the XZ shear component (02 component). + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. + data_sources : DataSources, optional + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + extrapolate : int, optional + Whether to extrapolate the data from the + integration points to the nodes. + nonlinear : int, optional + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). + abstract_meshed_region : MeshedRegion, optional + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. 
+ requested_location : str, optional + Average the elemental nodal result to the + requested location. + displacement : FieldsContainer or Field, optional + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_total_strain_XZ() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_total_strain_XZ( + ... time_scoping=my_time_scoping, + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... extrapolate=my_extrapolate, + ... nonlinear=my_nonlinear, + ... abstract_meshed_region=my_abstract_meshed_region, + ... requested_location=my_requested_location, + ... displacement=my_displacement, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + scoping=None, + streams_container=None, + data_sources=None, + extrapolate=None, + nonlinear=None, + abstract_meshed_region=None, + requested_location=None, + displacement=None, + config=None, + server=None, + ): + super().__init__(name="compute_total_strain_XZ", config=config, server=server) + self._inputs = InputsComputeTotalStrainXz(self) + self._outputs = OutputsComputeTotalStrainXz(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if extrapolate is not None: + self.inputs.extrapolate.connect(extrapolate) + if nonlinear is not None: + self.inputs.nonlinear.connect(nonlinear) + if abstract_meshed_region is not None: + self.inputs.abstract_meshed_region.connect(abstract_meshed_region) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if displacement is not None: + self.inputs.displacement.connect(displacement) + + @staticmethod + def _spec(): + description = """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, + pyramid and wedge). Layered elements are not supported. + All coordinates are global coordinates. Not all strain + formulations are supported. 
Get the XZ shear component (02 + component).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator).""", + ), + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected.""", + ), + 5: PinSpecification( + name="extrapolate", + type_names=["int32"], + optional=True, + document="""Whether to extrapolate the data from the + integration points to the nodes.""", + ), + 6: PinSpecification( + name="nonlinear", + type_names=["int32"], + optional=True, + document="""Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity).""", + ), + 7: PinSpecification( + name="abstract_meshed_region", + type_names=["abstract_meshed_region"], + optional=True, + document="""The underlying mesh. 
optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="displacement", + type_names=["fields_container", "field"], + optional=True, + document="""Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_total_strain_XZ", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeTotalStrainXz + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeTotalStrainXz + """ + return super().outputs + + +class InputsComputeTotalStrainXz(_Inputs): + """Intermediate class used to connect user inputs to + compute_total_strain_XZ operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_XZ() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + """ + + def __init__(self, op: Operator): + super().__init__(compute_total_strain_XZ._spec().inputs, op) + self._time_scoping = Input( + compute_total_strain_XZ._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._scoping = Input(compute_total_strain_XZ._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_total_strain_XZ._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + compute_total_strain_XZ._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._extrapolate = Input( + compute_total_strain_XZ._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._extrapolate) + self._nonlinear = Input(compute_total_strain_XZ._spec().input_pin(6), 6, op, -1) + self._inputs.append(self._nonlinear) + self._abstract_meshed_region = Input( + compute_total_strain_XZ._spec().input_pin(7), 7, op, -1 + ) + 
self._inputs.append(self._abstract_meshed_region) + self._requested_location = Input( + compute_total_strain_XZ._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._displacement = Input( + compute_total_strain_XZ._spec().input_pin(10), 10, op, -1 + ) + self._inputs.append(self._displacement) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_XZ() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def scoping(self): + """Allows to connect scoping input to the operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_XZ() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. 
+ + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_XZ() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_XZ() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def extrapolate(self): + """Allows to connect extrapolate input to the operator. + + Whether to extrapolate the data from the + integration points to the nodes. + + Parameters + ---------- + my_extrapolate : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_XZ() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> # or + >>> op.inputs.extrapolate(my_extrapolate) + """ + return self._extrapolate + + @property + def nonlinear(self): + """Allows to connect nonlinear input to the operator. + + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). 
+ + Parameters + ---------- + my_nonlinear : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_XZ() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> # or + >>> op.inputs.nonlinear(my_nonlinear) + """ + return self._nonlinear + + @property + def abstract_meshed_region(self): + """Allows to connect abstract_meshed_region input to the operator. + + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. + + Parameters + ---------- + my_abstract_meshed_region : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_XZ() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> # or + >>> op.inputs.abstract_meshed_region(my_abstract_meshed_region) + """ + return self._abstract_meshed_region + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_XZ() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def displacement(self): + """Allows to connect displacement input to the operator. + + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. 
+ + Parameters + ---------- + my_displacement : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_XZ() + >>> op.inputs.displacement.connect(my_displacement) + >>> # or + >>> op.inputs.displacement(my_displacement) + """ + return self._displacement + + +class OutputsComputeTotalStrainXz(_Outputs): + """Intermediate class used to get outputs from + compute_total_strain_XZ operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_XZ() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_total_strain_XZ._spec().outputs, op) + self._fields_container = Output( + compute_total_strain_XZ._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_XZ() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_total_strain_Y.py b/ansys/dpf/core/operators/result/compute_total_strain_Y.py new file mode 100644 index 00000000000..a709e7ad6d3 --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_total_strain_Y.py @@ -0,0 +1,597 @@ +""" +compute_total_strain_Y +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_total_strain_Y(Operator): + """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, pyramid + and wedge). Layered elements are not supported. All coordinates + are global coordinates. Not all strain formulations are supported. + Get the YY normal component (11 component). + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. + data_sources : DataSources, optional + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + extrapolate : int, optional + Whether to extrapolate the data from the + integration points to the nodes. + nonlinear : int, optional + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). + abstract_meshed_region : MeshedRegion, optional + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. 
+ requested_location : str, optional + Average the elemental nodal result to the + requested location. + displacement : FieldsContainer or Field, optional + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_total_strain_Y() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_total_strain_Y( + ... time_scoping=my_time_scoping, + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... extrapolate=my_extrapolate, + ... nonlinear=my_nonlinear, + ... abstract_meshed_region=my_abstract_meshed_region, + ... requested_location=my_requested_location, + ... displacement=my_displacement, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + scoping=None, + streams_container=None, + data_sources=None, + extrapolate=None, + nonlinear=None, + abstract_meshed_region=None, + requested_location=None, + displacement=None, + config=None, + server=None, + ): + super().__init__(name="compute_total_strain_Y", config=config, server=server) + self._inputs = InputsComputeTotalStrainY(self) + self._outputs = OutputsComputeTotalStrainY(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if extrapolate is not None: + self.inputs.extrapolate.connect(extrapolate) + if nonlinear is not None: + self.inputs.nonlinear.connect(nonlinear) + if abstract_meshed_region is not None: + self.inputs.abstract_meshed_region.connect(abstract_meshed_region) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if displacement is not None: + self.inputs.displacement.connect(displacement) + + @staticmethod + def _spec(): + description = """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, + pyramid and wedge). Layered elements are not supported. + All coordinates are global coordinates. Not all strain + formulations are supported. 
Get the YY normal component + (11 component).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator).""", + ), + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected.""", + ), + 5: PinSpecification( + name="extrapolate", + type_names=["int32"], + optional=True, + document="""Whether to extrapolate the data from the + integration points to the nodes.""", + ), + 6: PinSpecification( + name="nonlinear", + type_names=["int32"], + optional=True, + document="""Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity).""", + ), + 7: PinSpecification( + name="abstract_meshed_region", + type_names=["abstract_meshed_region"], + optional=True, + document="""The underlying mesh. 
optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="displacement", + type_names=["fields_container", "field"], + optional=True, + document="""Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the global server. + """ + return Operator.default_config(name="compute_total_strain_Y", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeTotalStrainY + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluating it + + Returns + -------- + outputs : OutputsComputeTotalStrainY + """ + return super().outputs + + +class InputsComputeTotalStrainY(_Inputs): + """Intermediate class used to connect user inputs to + compute_total_strain_Y operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_Y() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + """ + + def __init__(self, op: Operator): + super().__init__(compute_total_strain_Y._spec().inputs, op) + self._time_scoping = Input( + compute_total_strain_Y._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._scoping = Input(compute_total_strain_Y._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_total_strain_Y._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + compute_total_strain_Y._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._extrapolate = Input( + compute_total_strain_Y._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._extrapolate) + self._nonlinear = Input(compute_total_strain_Y._spec().input_pin(6), 6, op, -1) + self._inputs.append(self._nonlinear) + self._abstract_meshed_region = Input( + compute_total_strain_Y._spec().input_pin(7), 7, op, -1 + ) + 
self._inputs.append(self._abstract_meshed_region) + self._requested_location = Input( + compute_total_strain_Y._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._displacement = Input( + compute_total_strain_Y._spec().input_pin(10), 10, op, -1 + ) + self._inputs.append(self._displacement) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_Y() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def scoping(self): + """Allows to connect scoping input to the operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_Y() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. 
+ + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_Y() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_Y() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def extrapolate(self): + """Allows to connect extrapolate input to the operator. + + Whether to extrapolate the data from the + integration points to the nodes. + + Parameters + ---------- + my_extrapolate : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_Y() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> # or + >>> op.inputs.extrapolate(my_extrapolate) + """ + return self._extrapolate + + @property + def nonlinear(self): + """Allows to connect nonlinear input to the operator. + + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). 
+ + Parameters + ---------- + my_nonlinear : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_Y() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> # or + >>> op.inputs.nonlinear(my_nonlinear) + """ + return self._nonlinear + + @property + def abstract_meshed_region(self): + """Allows to connect abstract_meshed_region input to the operator. + + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. + + Parameters + ---------- + my_abstract_meshed_region : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_Y() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> # or + >>> op.inputs.abstract_meshed_region(my_abstract_meshed_region) + """ + return self._abstract_meshed_region + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_Y() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def displacement(self): + """Allows to connect displacement input to the operator. + + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. 
+ + Parameters + ---------- + my_displacement : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_Y() + >>> op.inputs.displacement.connect(my_displacement) + >>> # or + >>> op.inputs.displacement(my_displacement) + """ + return self._displacement + + +class OutputsComputeTotalStrainY(_Outputs): + """Intermediate class used to get outputs from + compute_total_strain_Y operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_Y() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_total_strain_Y._spec().outputs, op) + self._fields_container = Output( + compute_total_strain_Y._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_Y() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_total_strain_YZ.py b/ansys/dpf/core/operators/result/compute_total_strain_YZ.py new file mode 100644 index 00000000000..e07b3f0b357 --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_total_strain_YZ.py @@ -0,0 +1,597 @@ +""" +compute_total_strain_YZ +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_total_strain_YZ(Operator): + """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, pyramid + and wedge). Layered elements are not supported. All coordinates + are global coordinates. Not all strain formulations are supported. + Get the YZ shear component (12 component). + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. + data_sources : DataSources, optional + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + extrapolate : int, optional + Whether to extrapolate the data from the + integration points to the nodes. + nonlinear : int, optional + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). + abstract_meshed_region : MeshedRegion, optional + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. 
+ requested_location : str, optional + Average the elemental nodal result to the + requested location. + displacement : FieldsContainer or Field, optional + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_total_strain_YZ() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_total_strain_YZ( + ... time_scoping=my_time_scoping, + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... extrapolate=my_extrapolate, + ... nonlinear=my_nonlinear, + ... abstract_meshed_region=my_abstract_meshed_region, + ... requested_location=my_requested_location, + ... displacement=my_displacement, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + scoping=None, + streams_container=None, + data_sources=None, + extrapolate=None, + nonlinear=None, + abstract_meshed_region=None, + requested_location=None, + displacement=None, + config=None, + server=None, + ): + super().__init__(name="compute_total_strain_YZ", config=config, server=server) + self._inputs = InputsComputeTotalStrainYz(self) + self._outputs = OutputsComputeTotalStrainYz(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if extrapolate is not None: + self.inputs.extrapolate.connect(extrapolate) + if nonlinear is not None: + self.inputs.nonlinear.connect(nonlinear) + if abstract_meshed_region is not None: + self.inputs.abstract_meshed_region.connect(abstract_meshed_region) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if displacement is not None: + self.inputs.displacement.connect(displacement) + + @staticmethod + def _spec(): + description = """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, + pyramid and wedge). Layered elements are not supported. + All coordinates are global coordinates. Not all strain + formulations are supported. 
Get the YZ shear component (12 + component).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator).""", + ), + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected.""", + ), + 5: PinSpecification( + name="extrapolate", + type_names=["int32"], + optional=True, + document="""Whether to extrapolate the data from the + integration points to the nodes.""", + ), + 6: PinSpecification( + name="nonlinear", + type_names=["int32"], + optional=True, + document="""Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity).""", + ), + 7: PinSpecification( + name="abstract_meshed_region", + type_names=["abstract_meshed_region"], + optional=True, + document="""The underlying mesh. 
optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="displacement", + type_names=["fields_container", "field"], + optional=True, + document="""Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_total_strain_YZ", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeTotalStrainYz + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeTotalStrainYz + """ + return super().outputs + + +class InputsComputeTotalStrainYz(_Inputs): + """Intermediate class used to connect user inputs to + compute_total_strain_YZ operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_YZ() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + """ + + def __init__(self, op: Operator): + super().__init__(compute_total_strain_YZ._spec().inputs, op) + self._time_scoping = Input( + compute_total_strain_YZ._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._scoping = Input(compute_total_strain_YZ._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_total_strain_YZ._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + compute_total_strain_YZ._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._extrapolate = Input( + compute_total_strain_YZ._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._extrapolate) + self._nonlinear = Input(compute_total_strain_YZ._spec().input_pin(6), 6, op, -1) + self._inputs.append(self._nonlinear) + self._abstract_meshed_region = Input( + compute_total_strain_YZ._spec().input_pin(7), 7, op, -1 + ) + 
self._inputs.append(self._abstract_meshed_region) + self._requested_location = Input( + compute_total_strain_YZ._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._displacement = Input( + compute_total_strain_YZ._spec().input_pin(10), 10, op, -1 + ) + self._inputs.append(self._displacement) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_YZ() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def scoping(self): + """Allows to connect scoping input to the operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_YZ() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. 
+ + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_YZ() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_YZ() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def extrapolate(self): + """Allows to connect extrapolate input to the operator. + + Whether to extrapolate the data from the + integration points to the nodes. + + Parameters + ---------- + my_extrapolate : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_YZ() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> # or + >>> op.inputs.extrapolate(my_extrapolate) + """ + return self._extrapolate + + @property + def nonlinear(self): + """Allows to connect nonlinear input to the operator. + + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). 
+ + Parameters + ---------- + my_nonlinear : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_YZ() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> # or + >>> op.inputs.nonlinear(my_nonlinear) + """ + return self._nonlinear + + @property + def abstract_meshed_region(self): + """Allows to connect abstract_meshed_region input to the operator. + + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. + + Parameters + ---------- + my_abstract_meshed_region : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_YZ() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> # or + >>> op.inputs.abstract_meshed_region(my_abstract_meshed_region) + """ + return self._abstract_meshed_region + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_YZ() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def displacement(self): + """Allows to connect displacement input to the operator. + + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. 
+ + Parameters + ---------- + my_displacement : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_YZ() + >>> op.inputs.displacement.connect(my_displacement) + >>> # or + >>> op.inputs.displacement(my_displacement) + """ + return self._displacement + + +class OutputsComputeTotalStrainYz(_Outputs): + """Intermediate class used to get outputs from + compute_total_strain_YZ operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_YZ() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_total_strain_YZ._spec().outputs, op) + self._fields_container = Output( + compute_total_strain_YZ._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_YZ() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/compute_total_strain_Z.py b/ansys/dpf/core/operators/result/compute_total_strain_Z.py new file mode 100644 index 00000000000..86171d78162 --- /dev/null +++ b/ansys/dpf/core/operators/result/compute_total_strain_Z.py @@ -0,0 +1,597 @@ +""" +compute_total_strain_Z +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class compute_total_strain_Z(Operator): + """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, pyramid + and wedge). Layered elements are not supported. All coordinates + are global coordinates. Not all strain formulations are supported. + Get the ZZ normal component (22 component). + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + scoping : Scoping, optional + The element scoping on which the result is + computed. + streams_container : StreamsContainer, optional + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. + data_sources : DataSources, optional + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + extrapolate : int, optional + Whether to extrapolate the data from the + integration points to the nodes. + nonlinear : int, optional + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). + abstract_meshed_region : MeshedRegion, optional + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. 
+ requested_location : str, optional + Average the elemental nodal result to the + requested location. + displacement : FieldsContainer or Field, optional + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.compute_total_strain_Z() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.compute_total_strain_Z( + ... time_scoping=my_time_scoping, + ... scoping=my_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... extrapolate=my_extrapolate, + ... nonlinear=my_nonlinear, + ... abstract_meshed_region=my_abstract_meshed_region, + ... requested_location=my_requested_location, + ... displacement=my_displacement, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + scoping=None, + streams_container=None, + data_sources=None, + extrapolate=None, + nonlinear=None, + abstract_meshed_region=None, + requested_location=None, + displacement=None, + config=None, + server=None, + ): + super().__init__(name="compute_total_strain_Z", config=config, server=server) + self._inputs = InputsComputeTotalStrainZ(self) + self._outputs = OutputsComputeTotalStrainZ(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if scoping is not None: + self.inputs.scoping.connect(scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if extrapolate is not None: + self.inputs.extrapolate.connect(extrapolate) + if nonlinear is not None: + self.inputs.nonlinear.connect(nonlinear) + if abstract_meshed_region is not None: + self.inputs.abstract_meshed_region.connect(abstract_meshed_region) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if displacement is not None: + self.inputs.displacement.connect(displacement) + + @staticmethod + def _spec(): + description = """Computes the strain from a displacement field. Only some 3-D elements + and integration schemes are supported (only hexa, tetra, + pyramid and wedge). Layered elements are not supported. + All coordinates are global coordinates. Not all strain + formulations are supported. 
Get the ZZ normal component + (22 component).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator).""", + ), + 1: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=True, + document="""The element scoping on which the result is + computed.""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected.""", + ), + 5: PinSpecification( + name="extrapolate", + type_names=["int32"], + optional=True, + document="""Whether to extrapolate the data from the + integration points to the nodes.""", + ), + 6: PinSpecification( + name="nonlinear", + type_names=["int32"], + optional=True, + document="""Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity).""", + ), + 7: PinSpecification( + name="abstract_meshed_region", + type_names=["abstract_meshed_region"], + optional=True, + document="""The underlying mesh. 
optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support.""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Average the elemental nodal result to the + requested location.""", + ), + 10: PinSpecification( + name="displacement", + type_names=["fields_container", "field"], + optional=True, + document="""Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""The computed result fields container + (elemental nodal).""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="compute_total_strain_Z", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsComputeTotalStrainZ + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsComputeTotalStrainZ + """ + return super().outputs + + +class InputsComputeTotalStrainZ(_Inputs): + """Intermediate class used to connect user inputs to + compute_total_strain_Z operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_Z() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_scoping = dpf.Scoping() + >>> op.inputs.scoping.connect(my_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_extrapolate = int() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> my_nonlinear = int() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_displacement = dpf.FieldsContainer() + >>> op.inputs.displacement.connect(my_displacement) + """ + + def __init__(self, op: Operator): + super().__init__(compute_total_strain_Z._spec().inputs, op) + self._time_scoping = Input( + compute_total_strain_Z._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._scoping = Input(compute_total_strain_Z._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._scoping) + self._streams_container = Input( + compute_total_strain_Z._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + compute_total_strain_Z._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._extrapolate = Input( + compute_total_strain_Z._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._extrapolate) + self._nonlinear = Input(compute_total_strain_Z._spec().input_pin(6), 6, op, -1) + self._inputs.append(self._nonlinear) + self._abstract_meshed_region = Input( + compute_total_strain_Z._spec().input_pin(7), 7, op, -1 + ) + 
self._inputs.append(self._abstract_meshed_region) + self._requested_location = Input( + compute_total_strain_Z._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._displacement = Input( + compute_total_strain_Z._spec().input_pin(10), 10, op, -1 + ) + self._inputs.append(self._displacement) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output. will only be used if no + displacement input is given (will be + applied on displacement operator). + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_Z() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def scoping(self): + """Allows to connect scoping input to the operator. + + The element scoping on which the result is + computed. + + Parameters + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_Z() + >>> op.inputs.scoping.connect(my_scoping) + >>> # or + >>> op.inputs.scoping(my_scoping) + """ + return self._scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Optional if a mesh or a data_sources have + been connected. required if no + displacement input have been + connected. 
+ + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_Z() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Optional if a mesh or a streams_container + have been connected, or if the + displacement's field has a mesh + support. required if no displacement + input have been connected. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_Z() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def extrapolate(self): + """Allows to connect extrapolate input to the operator. + + Whether to extrapolate the data from the + integration points to the nodes. + + Parameters + ---------- + my_extrapolate : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_Z() + >>> op.inputs.extrapolate.connect(my_extrapolate) + >>> # or + >>> op.inputs.extrapolate(my_extrapolate) + """ + return self._extrapolate + + @property + def nonlinear(self): + """Allows to connect nonlinear input to the operator. + + Whether to use nonlinear geometry or + nonlinear material (1 = large strain, + 2 = hyperelasticity). 
+ + Parameters + ---------- + my_nonlinear : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_Z() + >>> op.inputs.nonlinear.connect(my_nonlinear) + >>> # or + >>> op.inputs.nonlinear(my_nonlinear) + """ + return self._nonlinear + + @property + def abstract_meshed_region(self): + """Allows to connect abstract_meshed_region input to the operator. + + The underlying mesh. optional if a + data_sources or a streams_container + have been connected, or if the + displacement's field has a mesh + support. + + Parameters + ---------- + my_abstract_meshed_region : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_Z() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> # or + >>> op.inputs.abstract_meshed_region(my_abstract_meshed_region) + """ + return self._abstract_meshed_region + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Average the elemental nodal result to the + requested location. + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_Z() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def displacement(self): + """Allows to connect displacement input to the operator. + + Field/or fields container containing only the + displacement field (nodal). if none + specified, read displacements from + result file using the data_sources. 
+ + Parameters + ---------- + my_displacement : FieldsContainer or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_Z() + >>> op.inputs.displacement.connect(my_displacement) + >>> # or + >>> op.inputs.displacement(my_displacement) + """ + return self._displacement + + +class OutputsComputeTotalStrainZ(_Outputs): + """Intermediate class used to get outputs from + compute_total_strain_Z operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_Z() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(compute_total_strain_Z._spec().outputs, op) + self._fields_container = Output( + compute_total_strain_Z._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.compute_total_strain_Z() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/contact_fluid_penetration_pressure.py b/ansys/dpf/core/operators/result/contact_fluid_penetration_pressure.py index 8068613e110..b58043d51d7 100644 --- a/ansys/dpf/core/operators/result/contact_fluid_penetration_pressure.py +++ b/ansys/dpf/core/operators/result/contact_fluid_penetration_pressure.py @@ -1,98 +1,274 @@ """ contact_fluid_penetration_pressure -================================== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class contact_fluid_penetration_pressure(Operator): - """Read/compute element actual applied fluid penetration pressure by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. - - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.contact_fluid_penetration_pressure() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> 
op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.contact_fluid_penetration_pressure(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, requested_location=None, config=None, server=None): - super().__init__(name="ECT_FRES", config = config, server = server) + """Read/compute element actual applied fluid penetration pressure by + calling the readers defined by the datasources. Regarding the + requested location and the input mesh scoping, the result location + can be Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.contact_fluid_penetration_pressure() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> 
op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.contact_fluid_penetration_pressure( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="ECT_FRES", config=config, server=server) self._inputs = InputsContactFluidPenetrationPressure(self) self._outputs = OutputsContactFluidPenetrationPressure(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + 
self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element actual applied fluid penetration pressure by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. 
Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element actual applied fluid penetration pressure by + calling the readers defined by the datasources. 
Regarding + the requested location and the input mesh scoping, the + result location can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: 
PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ECT_FRES") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="ECT_FRES", server=server) @property def inputs(self): @@ -100,301 +276,347 @@ def inputs(self): Returns -------- - inputs : InputsContactFluidPenetrationPressure + inputs : InputsContactFluidPenetrationPressure """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsContactFluidPenetrationPressure + outputs : OutputsContactFluidPenetrationPressure """ return super().outputs -#internal name: ECT_FRES -#scripting name: contact_fluid_penetration_pressure class InputsContactFluidPenetrationPressure(_Inputs): - """Intermediate class used to connect user inputs to contact_fluid_penetration_pressure operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.contact_fluid_penetration_pressure() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + contact_fluid_penetration_pressure operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_fluid_penetration_pressure() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(contact_fluid_penetration_pressure._spec().inputs, op) - self._time_scoping = Input(contact_fluid_penetration_pressure._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + contact_fluid_penetration_pressure._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(contact_fluid_penetration_pressure._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + contact_fluid_penetration_pressure._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(contact_fluid_penetration_pressure._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + contact_fluid_penetration_pressure._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = 
Input(contact_fluid_penetration_pressure._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + contact_fluid_penetration_pressure._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(contact_fluid_penetration_pressure._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + contact_fluid_penetration_pressure._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(contact_fluid_penetration_pressure._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + contact_fluid_penetration_pressure._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(contact_fluid_penetration_pressure._spec().input_pin(7), 7, op, -1) + self._mesh = Input( + contact_fluid_penetration_pressure._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(contact_fluid_penetration_pressure._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + contact_fluid_penetration_pressure._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(contact_fluid_penetration_pressure._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + contact_fluid_penetration_pressure._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) + self._read_beams = Input( + contact_fluid_penetration_pressure._spec().input_pin(21), 21, op, -1 + ) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_fluid_penetration_pressure() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_fluid_penetration_pressure() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_fluid_penetration_pressure() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_fluid_penetration_pressure() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_fluid_penetration_pressure() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_fluid_penetration_pressure() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_fluid_penetration_pressure() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. 
- - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_fluid_penetration_pressure() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_fluid_penetration_pressure() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. 
+ + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_fluid_penetration_pressure() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsContactFluidPenetrationPressure(_Outputs): - """Intermediate class used to get outputs from contact_fluid_penetration_pressure operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.contact_fluid_penetration_pressure() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + contact_fluid_penetration_pressure operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_fluid_penetration_pressure() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(contact_fluid_penetration_pressure._spec().outputs, op) - self._fields_container = Output(contact_fluid_penetration_pressure._spec().output_pin(0), 0, op) + self._fields_container = Output( + contact_fluid_penetration_pressure._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_fluid_penetration_pressure() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/contact_friction_stress.py b/ansys/dpf/core/operators/result/contact_friction_stress.py index 786b8b47203..7d162b6548d 100644 --- a/ansys/dpf/core/operators/result/contact_friction_stress.py +++ b/ansys/dpf/core/operators/result/contact_friction_stress.py @@ -1,98 +1,274 @@ """ contact_friction_stress -======================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class contact_friction_stress(Operator): - """Read/compute element contact friction stress by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.contact_friction_stress() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.contact_friction_stress(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="ECT_SFRIC", config = config, server = server) + """Read/compute element contact friction stress by calling the readers + defined by the datasources. Regarding the requested location and + the input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.contact_friction_stress() + + >>> # Make input connections + >>> 
my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.contact_friction_stress( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="ECT_SFRIC", config=config, server=server) self._inputs = InputsContactFrictionStress(self) self._outputs = OutputsContactFrictionStress(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element contact friction stress by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element contact friction stress by calling the readers + defined by the datasources. Regarding the requested + location and the input mesh scoping, the result location + can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ECT_SFRIC") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ECT_SFRIC", server=server) @property def inputs(self): @@ -100,301 +276,345 @@ def inputs(self): Returns -------- - inputs : InputsContactFrictionStress + inputs : InputsContactFrictionStress """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsContactFrictionStress + outputs : OutputsContactFrictionStress """ return super().outputs -#internal name: ECT_SFRIC -#scripting name: contact_friction_stress class InputsContactFrictionStress(_Inputs): - """Intermediate class used to connect user inputs to contact_friction_stress operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.contact_friction_stress() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() 
- >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + contact_friction_stress operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_friction_stress() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(contact_friction_stress._spec().inputs, op) - self._time_scoping = Input(contact_friction_stress._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + contact_friction_stress._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(contact_friction_stress._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + contact_friction_stress._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(contact_friction_stress._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + 
contact_friction_stress._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(contact_friction_stress._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + contact_friction_stress._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(contact_friction_stress._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + contact_friction_stress._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(contact_friction_stress._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + contact_friction_stress._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(contact_friction_stress._spec().input_pin(7), 7, op, -1) + self._mesh = Input(contact_friction_stress._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(contact_friction_stress._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + contact_friction_stress._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(contact_friction_stress._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + contact_friction_stress._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) + self._read_beams = Input( + contact_friction_stress._spec().input_pin(21), 21, op, -1 + ) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_friction_stress() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_friction_stress() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_friction_stress() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_friction_stress() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_friction_stress() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_friction_stress() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_friction_stress() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. 
- - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_friction_stress() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_friction_stress() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. 
+ + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_friction_stress() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsContactFrictionStress(_Outputs): - """Intermediate class used to get outputs from contact_friction_stress operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.contact_friction_stress() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + contact_friction_stress operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_friction_stress() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(contact_friction_stress._spec().outputs, op) - self._fields_container = Output(contact_friction_stress._spec().output_pin(0), 0, op) + self._fields_container = Output( + contact_friction_stress._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_friction_stress() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/contact_gap_distance.py b/ansys/dpf/core/operators/result/contact_gap_distance.py index 4921456ec4c..fff2abf064a 100644 --- a/ansys/dpf/core/operators/result/contact_gap_distance.py +++ b/ansys/dpf/core/operators/result/contact_gap_distance.py @@ -1,98 +1,274 @@ """ contact_gap_distance -==================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class contact_gap_distance(Operator): - """Read/compute element contact gap distance by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.contact_gap_distance() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.contact_gap_distance(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="ECT_GAP", config = config, server = server) + """Read/compute element contact gap distance by calling the readers + defined by the datasources. Regarding the requested location and + the input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.contact_gap_distance() + + >>> # Make input connections + >>> 
my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.contact_gap_distance( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="ECT_GAP", config=config, server=server) self._inputs = InputsContactGapDistance(self) self._outputs = OutputsContactGapDistance(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element contact gap distance by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element contact gap distance by calling the readers + defined by the datasources. Regarding the requested + location and the input mesh scoping, the result location + can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ECT_GAP") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ECT_GAP", server=server) @property def inputs(self): @@ -100,301 +276,337 @@ def inputs(self): Returns -------- - inputs : InputsContactGapDistance + inputs : InputsContactGapDistance """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsContactGapDistance + outputs : OutputsContactGapDistance """ return super().outputs -#internal name: ECT_GAP -#scripting name: contact_gap_distance class InputsContactGapDistance(_Inputs): - """Intermediate class used to connect user inputs to contact_gap_distance operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.contact_gap_distance() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> 
op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + contact_gap_distance operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_gap_distance() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(contact_gap_distance._spec().inputs, op) - self._time_scoping = Input(contact_gap_distance._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(contact_gap_distance._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(contact_gap_distance._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(contact_gap_distance._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(contact_gap_distance._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + contact_gap_distance._spec().input_pin(2), 2, op, -1 
+ ) self._inputs.append(self._fields_container) - self._streams_container = Input(contact_gap_distance._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + contact_gap_distance._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(contact_gap_distance._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(contact_gap_distance._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(contact_gap_distance._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + contact_gap_distance._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(contact_gap_distance._spec().input_pin(7), 7, op, -1) + self._mesh = Input(contact_gap_distance._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(contact_gap_distance._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + contact_gap_distance._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(contact_gap_distance._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + contact_gap_distance._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) + self._read_beams = Input(contact_gap_distance._spec().input_pin(21), 21, op, -1) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_gap_distance() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_gap_distance() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_gap_distance() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_gap_distance() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_gap_distance() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_gap_distance() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_gap_distance() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_gap_distance() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_gap_distance() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. + + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_gap_distance() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsContactGapDistance(_Outputs): - """Intermediate class used to get outputs from contact_gap_distance operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.contact_gap_distance() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + contact_gap_distance operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_gap_distance() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(contact_gap_distance._spec().outputs, op) - self._fields_container = Output(contact_gap_distance._spec().output_pin(0), 0, op) + self._fields_container = Output( + contact_gap_distance._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_gap_distance() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/contact_penetration.py b/ansys/dpf/core/operators/result/contact_penetration.py index 1e671af2ac8..5da7e9ae8f4 100644 --- a/ansys/dpf/core/operators/result/contact_penetration.py +++ b/ansys/dpf/core/operators/result/contact_penetration.py @@ -1,98 +1,274 @@ """ contact_penetration -=================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class contact_penetration(Operator): - """Read/compute element contact penetration by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.contact_penetration() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.contact_penetration(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="ECT_PENE", config = config, server = server) + """Read/compute element contact penetration by calling the readers + defined by the datasources. Regarding the requested location and + the input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.contact_penetration() + + >>> # Make input connections + >>> 
my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.contact_penetration( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="ECT_PENE", config=config, server=server) self._inputs = InputsContactPenetration(self) self._outputs = OutputsContactPenetration(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element contact penetration by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element contact penetration by calling the readers + defined by the datasources. Regarding the requested + location and the input mesh scoping, the result location + can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ECT_PENE") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ECT_PENE", server=server) @property def inputs(self): @@ -100,301 +276,335 @@ def inputs(self): Returns -------- - inputs : InputsContactPenetration + inputs : InputsContactPenetration """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsContactPenetration + outputs : OutputsContactPenetration """ return super().outputs -#internal name: ECT_PENE -#scripting name: contact_penetration class InputsContactPenetration(_Inputs): - """Intermediate class used to connect user inputs to contact_penetration operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.contact_penetration() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> 
op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + contact_penetration operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_penetration() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(contact_penetration._spec().inputs, op) - self._time_scoping = Input(contact_penetration._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(contact_penetration._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(contact_penetration._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(contact_penetration._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(contact_penetration._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + contact_penetration._spec().input_pin(2), 2, op, -1 + ) 
self._inputs.append(self._fields_container) - self._streams_container = Input(contact_penetration._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + contact_penetration._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(contact_penetration._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(contact_penetration._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(contact_penetration._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + contact_penetration._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(contact_penetration._spec().input_pin(7), 7, op, -1) + self._mesh = Input(contact_penetration._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(contact_penetration._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + contact_penetration._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(contact_penetration._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(contact_penetration._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) + self._read_beams = Input(contact_penetration._spec().input_pin(21), 21, op, -1) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_penetration() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_penetration() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_penetration() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_penetration() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_penetration() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_penetration() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_penetration() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_penetration() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_penetration() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. + + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_penetration() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsContactPenetration(_Outputs): - """Intermediate class used to get outputs from contact_penetration operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.contact_penetration() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + contact_penetration operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_penetration() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(contact_penetration._spec().outputs, op) - self._fields_container = Output(contact_penetration._spec().output_pin(0), 0, op) + self._fields_container = Output( + contact_penetration._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_penetration() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/contact_pressure.py b/ansys/dpf/core/operators/result/contact_pressure.py index efc73ad9299..512b0dee06e 100644 --- a/ansys/dpf/core/operators/result/contact_pressure.py +++ b/ansys/dpf/core/operators/result/contact_pressure.py @@ -1,98 +1,274 @@ """ contact_pressure -================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class contact_pressure(Operator): - """Read/compute element contact pressure by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.contact_pressure() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.contact_pressure(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="ECT_PRES", config = config, server = server) + """Read/compute element contact pressure by calling the readers defined + by the datasources. Regarding the requested location and the input + mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.contact_pressure() + + >>> # Make input connections + >>> my_time_scoping = 
dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.contact_pressure( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="ECT_PRES", config=config, server=server) self._inputs = InputsContactPressure(self) self._outputs = OutputsContactPressure(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element contact pressure by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element contact pressure by calling the readers defined + by the datasources. Regarding the requested location and + the input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ECT_PRES") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ECT_PRES", server=server) @property def inputs(self): @@ -100,301 +276,331 @@ def inputs(self): Returns -------- - inputs : InputsContactPressure + inputs : InputsContactPressure """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsContactPressure + outputs : OutputsContactPressure """ return super().outputs -#internal name: ECT_PRES -#scripting name: contact_pressure class InputsContactPressure(_Inputs): - """Intermediate class used to connect user inputs to contact_pressure operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.contact_pressure() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> 
op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + contact_pressure operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_pressure() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(contact_pressure._spec().inputs, op) - self._time_scoping = Input(contact_pressure._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(contact_pressure._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(contact_pressure._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(contact_pressure._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(contact_pressure._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(contact_pressure._spec().input_pin(2), 2, op, -1) 
self._inputs.append(self._fields_container) - self._streams_container = Input(contact_pressure._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + contact_pressure._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(contact_pressure._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(contact_pressure._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(contact_pressure._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + contact_pressure._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(contact_pressure._spec().input_pin(7), 7, op, -1) + self._mesh = Input(contact_pressure._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(contact_pressure._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + contact_pressure._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(contact_pressure._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(contact_pressure._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) + self._read_beams = Input(contact_pressure._spec().input_pin(21), 21, op, -1) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_pressure() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_pressure() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_pressure() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_pressure() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_pressure() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_pressure() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_pressure() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_pressure() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_pressure() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. + + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_pressure() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsContactPressure(_Outputs): - """Intermediate class used to get outputs from contact_pressure operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.contact_pressure() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + contact_pressure operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_pressure() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(contact_pressure._spec().outputs, op) - self._fields_container = Output(contact_pressure._spec().output_pin(0), 0, op) + self._fields_container = Output(contact_pressure._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_pressure() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/contact_sliding_distance.py b/ansys/dpf/core/operators/result/contact_sliding_distance.py index 78aff963d07..6e6b5b8d0ed 100644 --- a/ansys/dpf/core/operators/result/contact_sliding_distance.py +++ b/ansys/dpf/core/operators/result/contact_sliding_distance.py @@ -1,98 +1,274 @@ """ contact_sliding_distance -======================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class contact_sliding_distance(Operator): - """Read/compute element contact sliding distance by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. - - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.contact_sliding_distance() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.contact_sliding_distance(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> 
result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, requested_location=None, config=None, server=None): - super().__init__(name="ECT_SLIDE", config = config, server = server) + """Read/compute element contact sliding distance by calling the readers + defined by the datasources. Regarding the requested location and + the input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as 
dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.contact_sliding_distance() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.contact_sliding_distance( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="ECT_SLIDE", config=config, server=server) self._inputs = InputsContactSlidingDistance(self) self._outputs = OutputsContactSlidingDistance(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element contact sliding distance by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element contact sliding distance by calling the readers + defined by the datasources. Regarding the requested + location and the input mesh scoping, the result location + can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ECT_SLIDE") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ECT_SLIDE", server=server) @property def inputs(self): @@ -100,301 +276,345 @@ def inputs(self): Returns -------- - inputs : InputsContactSlidingDistance + inputs : InputsContactSlidingDistance """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsContactSlidingDistance + outputs : OutputsContactSlidingDistance """ return super().outputs -#internal name: ECT_SLIDE -#scripting name: contact_sliding_distance class InputsContactSlidingDistance(_Inputs): - """Intermediate class used to connect user inputs to contact_sliding_distance operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.contact_sliding_distance() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location 
= str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + contact_sliding_distance operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_sliding_distance() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(contact_sliding_distance._spec().inputs, op) - self._time_scoping = Input(contact_sliding_distance._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + contact_sliding_distance._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(contact_sliding_distance._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + contact_sliding_distance._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(contact_sliding_distance._spec().input_pin(2), 2, op, -1) + self._fields_container = 
Input( + contact_sliding_distance._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(contact_sliding_distance._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + contact_sliding_distance._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(contact_sliding_distance._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + contact_sliding_distance._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(contact_sliding_distance._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + contact_sliding_distance._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(contact_sliding_distance._spec().input_pin(7), 7, op, -1) + self._mesh = Input(contact_sliding_distance._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(contact_sliding_distance._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + contact_sliding_distance._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(contact_sliding_distance._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + contact_sliding_distance._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) + self._read_beams = Input( + contact_sliding_distance._spec().input_pin(21), 21, op, -1 + ) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_sliding_distance() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_sliding_distance() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_sliding_distance() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_sliding_distance() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_sliding_distance() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_sliding_distance() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_sliding_distance() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. 
- - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_sliding_distance() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_sliding_distance() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. 
+ + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_sliding_distance() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsContactSlidingDistance(_Outputs): - """Intermediate class used to get outputs from contact_sliding_distance operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.contact_sliding_distance() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + contact_sliding_distance operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_sliding_distance() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(contact_sliding_distance._spec().outputs, op) - self._fields_container = Output(contact_sliding_distance._spec().output_pin(0), 0, op) + self._fields_container = Output( + contact_sliding_distance._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_sliding_distance() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/contact_status.py b/ansys/dpf/core/operators/result/contact_status.py index b51f43e8fa2..ed01f153a3f 100644 --- a/ansys/dpf/core/operators/result/contact_status.py +++ b/ansys/dpf/core/operators/result/contact_status.py @@ -1,98 +1,274 @@ """ contact_status -============== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class contact_status(Operator): - """Read/compute element contact status by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.contact_status() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.contact_status(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="ECT_STAT", config = config, server = server) + """Read/compute element contact status by calling the readers defined by + the datasources. Regarding the requested location and the input + mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.contact_status() + + >>> # Make input connections + >>> my_time_scoping = 
dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.contact_status( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="ECT_STAT", config=config, server=server) self._inputs = InputsContactStatus(self) self._outputs = OutputsContactStatus(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element contact status by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element contact status by calling the readers defined by + the datasources. Regarding the requested location and the + input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ECT_STAT") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ECT_STAT", server=server) @property def inputs(self): @@ -100,301 +276,327 @@ def inputs(self): Returns -------- - inputs : InputsContactStatus + inputs : InputsContactStatus """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsContactStatus + outputs : OutputsContactStatus """ return super().outputs -#internal name: ECT_STAT -#scripting name: contact_status class InputsContactStatus(_Inputs): - """Intermediate class used to connect user inputs to contact_status operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.contact_status() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - 
>>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + contact_status operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_status() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(contact_status._spec().inputs, op) - self._time_scoping = Input(contact_status._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(contact_status._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(contact_status._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(contact_status._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(contact_status._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(contact_status._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(contact_status._spec().input_pin(3), 3, 
op, -1) + self._streams_container = Input(contact_status._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(contact_status._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(contact_status._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(contact_status._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + contact_status._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(contact_status._spec().input_pin(7), 7, op, -1) + self._mesh = Input(contact_status._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(contact_status._spec().input_pin(9), 9, op, -1) + self._requested_location = Input(contact_status._spec().input_pin(9), 9, op, -1) self._inputs.append(self._requested_location) - self._read_cyclic = Input(contact_status._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(contact_status._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) + self._read_beams = Input(contact_status._spec().input_pin(21), 21, op, -1) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_status() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_status() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_status() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_status() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_status() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_status() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_status() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_status() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_status() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. + + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_status() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsContactStatus(_Outputs): - """Intermediate class used to get outputs from contact_status operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.contact_status() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + contact_status operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_status() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(contact_status._spec().outputs, op) - self._fields_container = Output(contact_status._spec().output_pin(0), 0, op) + self._fields_container = Output(contact_status._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_status() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/contact_surface_heat_flux.py b/ansys/dpf/core/operators/result/contact_surface_heat_flux.py index 6e474f98311..6d7254e3c7f 100644 --- a/ansys/dpf/core/operators/result/contact_surface_heat_flux.py +++ b/ansys/dpf/core/operators/result/contact_surface_heat_flux.py @@ -1,98 +1,274 @@ """ contact_surface_heat_flux -========================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class contact_surface_heat_flux(Operator): - """Read/compute element total heat flux at contact surface by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. - - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.contact_surface_heat_flux() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.contact_surface_heat_flux(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> 
result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, requested_location=None, config=None, server=None): - super().__init__(name="ECT_FLUX", config = config, server = server) + """Read/compute element total heat flux at contact surface by calling the + readers defined by the datasources. Regarding the requested + location and the input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf 
import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.contact_surface_heat_flux() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.contact_surface_heat_flux( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="ECT_FLUX", config=config, server=server) self._inputs = InputsContactSurfaceHeatFlux(self) self._outputs = OutputsContactSurfaceHeatFlux(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element total heat flux at contact surface by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element total heat flux at contact surface by calling the + readers defined by the datasources. Regarding the + requested location and the input mesh scoping, the result + location can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ECT_FLUX") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ECT_FLUX", server=server) @property def inputs(self): @@ -100,301 +276,345 @@ def inputs(self): Returns -------- - inputs : InputsContactSurfaceHeatFlux + inputs : InputsContactSurfaceHeatFlux """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsContactSurfaceHeatFlux + outputs : OutputsContactSurfaceHeatFlux """ return super().outputs -#internal name: ECT_FLUX -#scripting name: contact_surface_heat_flux class InputsContactSurfaceHeatFlux(_Inputs): - """Intermediate class used to connect user inputs to contact_surface_heat_flux operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.contact_surface_heat_flux() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location 
= str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + contact_surface_heat_flux operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_surface_heat_flux() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(contact_surface_heat_flux._spec().inputs, op) - self._time_scoping = Input(contact_surface_heat_flux._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + contact_surface_heat_flux._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(contact_surface_heat_flux._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + contact_surface_heat_flux._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(contact_surface_heat_flux._spec().input_pin(2), 2, op, -1) + 
self._fields_container = Input( + contact_surface_heat_flux._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(contact_surface_heat_flux._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + contact_surface_heat_flux._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(contact_surface_heat_flux._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + contact_surface_heat_flux._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(contact_surface_heat_flux._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + contact_surface_heat_flux._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(contact_surface_heat_flux._spec().input_pin(7), 7, op, -1) + self._mesh = Input(contact_surface_heat_flux._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(contact_surface_heat_flux._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + contact_surface_heat_flux._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(contact_surface_heat_flux._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + contact_surface_heat_flux._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) + self._read_beams = Input( + contact_surface_heat_flux._spec().input_pin(21), 21, op, -1 + ) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_surface_heat_flux() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_surface_heat_flux() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_surface_heat_flux() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_surface_heat_flux() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_surface_heat_flux() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_surface_heat_flux() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_surface_heat_flux() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. 
- - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_surface_heat_flux() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_surface_heat_flux() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. 
+ + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_surface_heat_flux() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsContactSurfaceHeatFlux(_Outputs): - """Intermediate class used to get outputs from contact_surface_heat_flux operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.contact_surface_heat_flux() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + contact_surface_heat_flux operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_surface_heat_flux() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(contact_surface_heat_flux._spec().outputs, op) - self._fields_container = Output(contact_surface_heat_flux._spec().output_pin(0), 0, op) + self._fields_container = Output( + contact_surface_heat_flux._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_surface_heat_flux() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/contact_total_stress.py b/ansys/dpf/core/operators/result/contact_total_stress.py index e0b15a6fce3..949ee76e3c8 100644 --- a/ansys/dpf/core/operators/result/contact_total_stress.py +++ b/ansys/dpf/core/operators/result/contact_total_stress.py @@ -1,98 +1,274 @@ """ contact_total_stress -==================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class contact_total_stress(Operator): - """Read/compute element contact total stress (pressure plus friction) by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.contact_total_stress() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.contact_total_stress(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="ECT_STOT", config = config, server = server) + """Read/compute element contact total stress (pressure plus friction) by + calling the readers defined by the datasources. Regarding the + requested location and the input mesh scoping, the result location + can be Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.contact_total_stress() + + >>> # Make input 
connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.contact_total_stress( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="ECT_STOT", config=config, server=server) self._inputs = InputsContactTotalStress(self) self._outputs = OutputsContactTotalStress(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element contact total stress (pressure plus friction) by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element contact total stress (pressure plus friction) by + calling the readers defined by the datasources. Regarding + the requested location and the input mesh scoping, the + result location can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ECT_STOT") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ECT_STOT", server=server) @property def inputs(self): @@ -100,301 +276,337 @@ def inputs(self): Returns -------- - inputs : InputsContactTotalStress + inputs : InputsContactTotalStress """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsContactTotalStress + outputs : OutputsContactTotalStress """ return super().outputs -#internal name: ECT_STOT -#scripting name: contact_total_stress class InputsContactTotalStress(_Inputs): - """Intermediate class used to connect user inputs to contact_total_stress operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.contact_total_stress() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> 
op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + contact_total_stress operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_total_stress() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(contact_total_stress._spec().inputs, op) - self._time_scoping = Input(contact_total_stress._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(contact_total_stress._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(contact_total_stress._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(contact_total_stress._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(contact_total_stress._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + contact_total_stress._spec().input_pin(2), 2, op, -1 
+ ) self._inputs.append(self._fields_container) - self._streams_container = Input(contact_total_stress._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + contact_total_stress._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(contact_total_stress._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(contact_total_stress._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(contact_total_stress._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + contact_total_stress._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(contact_total_stress._spec().input_pin(7), 7, op, -1) + self._mesh = Input(contact_total_stress._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(contact_total_stress._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + contact_total_stress._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(contact_total_stress._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + contact_total_stress._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) + self._read_beams = Input(contact_total_stress._spec().input_pin(21), 21, op, -1) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_total_stress() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_total_stress() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_total_stress() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_total_stress() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_total_stress() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_total_stress() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_total_stress() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_total_stress() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_total_stress() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. + + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_total_stress() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsContactTotalStress(_Outputs): - """Intermediate class used to get outputs from contact_total_stress operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.contact_total_stress() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + contact_total_stress operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.contact_total_stress() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(contact_total_stress._spec().outputs, op) - self._fields_container = Output(contact_total_stress._spec().output_pin(0), 0, op) + self._fields_container = Output( + contact_total_stress._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.contact_total_stress() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/coordinate_system.py b/ansys/dpf/core/operators/result/coordinate_system.py new file mode 100644 index 00000000000..a0a234f041e --- /dev/null +++ b/ansys/dpf/core/operators/result/coordinate_system.py @@ -0,0 +1,258 @@ +""" +coordinate_system +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class coordinate_system(Operator): + """Extracts the Rotation Matrix and Origin of a specific coordinate + system + + Parameters + ---------- + cs_id : int + streams_container : StreamsContainer, optional + data_sources : DataSources + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.coordinate_system() + + >>> # Make input connections + >>> my_cs_id = int() + >>> op.inputs.cs_id.connect(my_cs_id) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.coordinate_system( + ... cs_id=my_cs_id, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__( + self, + cs_id=None, + streams_container=None, + data_sources=None, + config=None, + server=None, + ): + super().__init__(name="mapdl::rst::CS", config=config, server=server) + self._inputs = InputsCoordinateSystem(self) + self._outputs = OutputsCoordinateSystem(self) + if cs_id is not None: + self.inputs.cs_id.connect(cs_id) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + + @staticmethod + def _spec(): + description = """Extracts the Rotation Matrix and Origin of a specific coordinate + system""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="cs_id", + type_names=["int32"], + optional=False, + document="""""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""The first 9 double are the rotation (3x3 + matrix) and the last 3 is the + translation vector""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="mapdl::rst::CS", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsCoordinateSystem + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsCoordinateSystem + """ + return super().outputs + + +class InputsCoordinateSystem(_Inputs): + """Intermediate class used to connect user inputs to + coordinate_system operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.coordinate_system() + >>> my_cs_id = int() + >>> op.inputs.cs_id.connect(my_cs_id) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + """ + + def __init__(self, op: Operator): + super().__init__(coordinate_system._spec().inputs, op) + self._cs_id = Input(coordinate_system._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._cs_id) + self._streams_container = Input( + coordinate_system._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input(coordinate_system._spec().input_pin(4), 4, op, -1) + self._inputs.append(self._data_sources) + + @property + def cs_id(self): + """Allows to connect cs_id input to the operator. + + Parameters + ---------- + my_cs_id : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.coordinate_system() + >>> op.inputs.cs_id.connect(my_cs_id) + >>> # or + >>> op.inputs.cs_id(my_cs_id) + """ + return self._cs_id + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. 
+ + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.coordinate_system() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.coordinate_system() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + +class OutputsCoordinateSystem(_Outputs): + """Intermediate class used to get outputs from + coordinate_system operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.coordinate_system() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() + """ + + def __init__(self, op: Operator): + super().__init__(coordinate_system._spec().outputs, op) + self._field = Output(coordinate_system._spec().output_pin(0), 0, op) + self._outputs.append(self._field) + + @property + def field(self): + """Allows to get field output of the operator + + Returns + ---------- + my_field : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.coordinate_system() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() + """ # noqa: E501 + return self._field diff --git a/ansys/dpf/core/operators/result/creep_strain_energy_density.py b/ansys/dpf/core/operators/result/creep_strain_energy_density.py index 2b9db5163c2..0b99210ff66 100644 --- a/ansys/dpf/core/operators/result/creep_strain_energy_density.py +++ b/ansys/dpf/core/operators/result/creep_strain_energy_density.py @@ -1,98 +1,274 @@ """ creep_strain_energy_density -=========================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class creep_strain_energy_density(Operator): - """Read/compute element nodal creep strain energy density by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.creep_strain_energy_density() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.creep_strain_energy_density(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, 
data_sources=None, requested_location=None, config=None, server=None): - super().__init__(name="ENL_CRWK", config = config, server = server) + """Read/compute element nodal creep strain energy density by calling the + readers defined by the datasources. Regarding the requested + location and the input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.creep_strain_energy_density() + + >>> # 
Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.creep_strain_energy_density( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="ENL_CRWK", config=config, server=server) self._inputs = InputsCreepStrainEnergyDensity(self) self._outputs = OutputsCreepStrainEnergyDensity(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal creep strain energy density by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal creep strain energy density by calling the + readers defined by the datasources. Regarding the + requested location and the input mesh scoping, the result + location can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ENL_CRWK") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ENL_CRWK", server=server) @property def inputs(self): @@ -100,301 +276,345 @@ def inputs(self): Returns -------- - inputs : InputsCreepStrainEnergyDensity + inputs : InputsCreepStrainEnergyDensity """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCreepStrainEnergyDensity + outputs : OutputsCreepStrainEnergyDensity """ return super().outputs -#internal name: ENL_CRWK -#scripting name: creep_strain_energy_density class InputsCreepStrainEnergyDensity(_Inputs): - """Intermediate class used to connect user inputs to creep_strain_energy_density operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.creep_strain_energy_density() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> 
my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + creep_strain_energy_density operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.creep_strain_energy_density() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(creep_strain_energy_density._spec().inputs, op) - self._time_scoping = Input(creep_strain_energy_density._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + creep_strain_energy_density._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(creep_strain_energy_density._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + creep_strain_energy_density._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = 
Input(creep_strain_energy_density._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + creep_strain_energy_density._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(creep_strain_energy_density._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + creep_strain_energy_density._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(creep_strain_energy_density._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + creep_strain_energy_density._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(creep_strain_energy_density._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + creep_strain_energy_density._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(creep_strain_energy_density._spec().input_pin(7), 7, op, -1) + self._mesh = Input(creep_strain_energy_density._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(creep_strain_energy_density._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + creep_strain_energy_density._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(creep_strain_energy_density._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + creep_strain_energy_density._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) + self._read_beams = Input( + creep_strain_energy_density._spec().input_pin(21), 21, op, -1 + ) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.creep_strain_energy_density() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.creep_strain_energy_density() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.creep_strain_energy_density() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.creep_strain_energy_density() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.creep_strain_energy_density() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.creep_strain_energy_density() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.creep_strain_energy_density() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. 
- - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.creep_strain_energy_density() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.creep_strain_energy_density() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. 
+ + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.creep_strain_energy_density() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsCreepStrainEnergyDensity(_Outputs): - """Intermediate class used to get outputs from creep_strain_energy_density operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.creep_strain_energy_density() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + creep_strain_energy_density operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.creep_strain_energy_density() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(creep_strain_energy_density._spec().outputs, op) - self._fields_container = Output(creep_strain_energy_density._spec().output_pin(0), 0, op) + self._fields_container = Output( + creep_strain_energy_density._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.creep_strain_energy_density() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/current_density.py b/ansys/dpf/core/operators/result/current_density.py new file mode 100644 index 00000000000..89a1ec41206 --- /dev/null +++ b/ansys/dpf/core/operators/result/current_density.py @@ -0,0 +1,516 @@ +""" +current_density +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class current_density(Operator): + """Read/compute Current Density by calling the readers defined by the + datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.current_density() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.current_density( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... 
fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="ECD", config=config, server=server) + self._inputs = InputsCurrentDensity(self) + self._outputs = OutputsCurrentDensity(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if mesh_scoping is not None: + self.inputs.mesh_scoping.connect(mesh_scoping) + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + + @staticmethod + def _spec(): + description = """Read/compute Current Density by calling the readers defined by the + datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + 
type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ECD", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsCurrentDensity + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsCurrentDensity + """ + return super().outputs + + +class InputsCurrentDensity(_Inputs): + """Intermediate class used to connect user inputs to + current_density operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.current_density() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """ + + def __init__(self, op: Operator): + super().__init__(current_density._spec().inputs, op) + self._time_scoping = Input(current_density._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._time_scoping) + self._mesh_scoping = 
Input(current_density._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._mesh_scoping) + self._fields_container = Input(current_density._spec().input_pin(2), 2, op, -1) + self._inputs.append(self._fields_container) + self._streams_container = Input(current_density._spec().input_pin(3), 3, op, -1) + self._inputs.append(self._streams_container) + self._data_sources = Input(current_density._spec().input_pin(4), 4, op, -1) + self._inputs.append(self._data_sources) + self._bool_rotate_to_global = Input( + current_density._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._bool_rotate_to_global) + self._mesh = Input(current_density._spec().input_pin(7), 7, op, -1) + self._inputs.append(self._mesh) + self._read_cyclic = Input(current_density._spec().input_pin(14), 14, op, -1) + self._inputs.append(self._read_cyclic) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.current_density() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def mesh_scoping(self): + """Allows to connect mesh_scoping input to the operator. + + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains + + Parameters + ---------- + my_mesh_scoping : ScopingsContainer or Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.current_density() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> # or + >>> op.inputs.mesh_scoping(my_mesh_scoping) + """ + return self._mesh_scoping + + @property + def fields_container(self): + """Allows to connect fields_container input to the operator. + + Fields container already allocated modified + inplace + + Parameters + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.current_density() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> # or + >>> op.inputs.fields_container(my_fields_container) + """ + return self._fields_container + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Result file container allowed to be kept open + to cache data + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.current_density() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. 
+ + Result file path container, used if no + streams are set + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.current_density() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def bool_rotate_to_global(self): + """Allows to connect bool_rotate_to_global input to the operator. + + If true the field is rotated to global + coordinate system (default true) + + Parameters + ---------- + my_bool_rotate_to_global : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.current_density() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> # or + >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) + """ + return self._bool_rotate_to_global + + @property + def mesh(self): + """Allows to connect mesh input to the operator. + + Prevents from reading the mesh in the result + files + + Parameters + ---------- + my_mesh : MeshedRegion or MeshesContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.current_density() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + @property + def read_cyclic(self): + """Allows to connect read_cyclic input to the operator. 
+ + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + Parameters + ---------- + my_read_cyclic : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.current_density() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> # or + >>> op.inputs.read_cyclic(my_read_cyclic) + """ + return self._read_cyclic + + +class OutputsCurrentDensity(_Outputs): + """Intermediate class used to get outputs from + current_density operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.current_density() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(current_density._spec().outputs, op) + self._fields_container = Output(current_density._spec().output_pin(0), 0, op) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.current_density() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/custom.py b/ansys/dpf/core/operators/result/custom.py index 1320ce392a0..a3d443cf6b0 100644 --- a/ansys/dpf/core/operators/result/custom.py +++ b/ansys/dpf/core/operators/result/custom.py @@ -1,92 +1,238 @@ """ custom -====== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class custom(Operator): - """Read/compute user defined result by calling the readers defined by the datasources. - - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.custom() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and 
connect inputs in one line - >>> op = dpf.operators.result.custom(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="custom", config = config, server = server) + """Read/compute user defined result by calling the readers defined by the + datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.custom() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> 
op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.custom( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="custom", config=config, server=server) self._inputs = InputsCustom(self) self._outputs = OutputsCustom(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute user defined result by calling the readers defined by the datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. 
Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute user defined result by calling the readers defined by the + datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + 
document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "custom") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="custom", server=server) @property def inputs(self): @@ -94,275 +240,275 @@ def inputs(self): Returns -------- - inputs : InputsCustom + inputs : InputsCustom """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCustom + outputs : OutputsCustom """ return super().outputs -#internal name: custom -#scripting name: custom class InputsCustom(_Inputs): - """Intermediate class used to connect user inputs to custom operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.custom() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + custom operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.custom() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(custom._spec().inputs, op) - self._time_scoping = Input(custom._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(custom._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(custom._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(custom._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(custom._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(custom._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(custom._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(custom._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(custom._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(custom._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(custom._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = 
Input(custom._spec().input_pin(5), 5, op, -1) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(custom._spec().input_pin(7), 7, op, -1) + self._mesh = Input(custom._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(custom._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(custom._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.custom() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.custom() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.custom() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.custom() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.custom() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.custom() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.custom() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.custom() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsCustom(_Outputs): - """Intermediate class used to get outputs from custom operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.custom() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + custom operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.custom() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(custom._spec().outputs, op) - self._fields_container = Output(custom._spec().output_pin(0), 0, op) + self._fields_container = Output(custom._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.custom() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/cyclic_analytic_seqv_max.py b/ansys/dpf/core/operators/result/cyclic_analytic_seqv_max.py index b95c3105d25..c5778c76364 100644 --- a/ansys/dpf/core/operators/result/cyclic_analytic_seqv_max.py +++ b/ansys/dpf/core/operators/result/cyclic_analytic_seqv_max.py @@ -1,84 +1,156 @@ """ cyclic_analytic_seqv_max -======================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "result" category -""" class cyclic_analytic_seqv_max(Operator): - """Compute the maximum of the Von Mises equivalent stress that can be expected on 360 degrees - - available inputs: - - time_scoping (Scoping, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping, list) (optional) - - fields_container (FieldsContainer) - - bool_rotate_to_global (bool) (optional) - - cyclic_support (CyclicSupport) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.cyclic_analytic_seqv_max() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> 
my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_cyclic_support = dpf.CyclicSupport() - >>> op.inputs.cyclic_support.connect(my_cyclic_support) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.cyclic_analytic_seqv_max(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,fields_container=my_fields_container,bool_rotate_to_global=my_bool_rotate_to_global,cyclic_support=my_cyclic_support) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, fields_container=None, bool_rotate_to_global=None, cyclic_support=None, config=None, server=None): - super().__init__(name="cyclic_analytic_stress_eqv_max", config = config, server = server) + """Compute the maximum of the Von Mises equivalent stress that can be + expected on 360 degrees + + Parameters + ---------- + time_scoping : Scoping, optional + mesh_scoping : ScopingsContainer or Scoping, optional + fields_container : FieldsContainer + Field container with the base and duplicate + sectors + bool_rotate_to_global : bool, optional + Default is true + cyclic_support : CyclicSupport + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.cyclic_analytic_seqv_max() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_cyclic_support = dpf.CyclicSupport() + >>> op.inputs.cyclic_support.connect(my_cyclic_support) + + >>> # Instantiate operator and 
connect inputs in one line + >>> op = dpf.operators.result.cyclic_analytic_seqv_max( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... cyclic_support=my_cyclic_support, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + bool_rotate_to_global=None, + cyclic_support=None, + config=None, + server=None, + ): + super().__init__( + name="cyclic_analytic_stress_eqv_max", config=config, server=server + ) self._inputs = InputsCyclicAnalyticSeqvMax(self) self._outputs = OutputsCyclicAnalyticSeqvMax(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if bool_rotate_to_global !=None: + if bool_rotate_to_global is not None: self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) - if cyclic_support !=None: + if cyclic_support is not None: self.inputs.cyclic_support.connect(cyclic_support) @staticmethod def _spec(): - spec = Specification(description="""Compute the maximum of the Von Mises equivalent stress that can be expected on 360 degrees""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","vector"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping","vector"], optional=True, document=""""""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""field container with the base and duplicate sectors"""), - 5 : PinSpecification(name = 
"bool_rotate_to_global", type_names=["bool"], optional=True, document="""default is true"""), - 16 : PinSpecification(name = "cyclic_support", type_names=["cyclic_support"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""FieldsContainer filled in""")}) + description = """Compute the maximum of the Von Mises equivalent stress that can be + expected on 360 degrees""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping", "vector"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Field container with the base and duplicate + sectors""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""Default is true""", + ), + 16: PinSpecification( + name="cyclic_support", + type_names=["cyclic_support"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Fieldscontainer filled in""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "cyclic_analytic_stress_eqv_max") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. 
When + ``None``, attempts to use the the global server. + """ + return Operator.default_config( + name="cyclic_analytic_stress_eqv_max", server=server + ) @property def inputs(self): @@ -86,193 +158,192 @@ def inputs(self): Returns -------- - inputs : InputsCyclicAnalyticSeqvMax + inputs : InputsCyclicAnalyticSeqvMax """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCyclicAnalyticSeqvMax + outputs : OutputsCyclicAnalyticSeqvMax """ return super().outputs -#internal name: cyclic_analytic_stress_eqv_max -#scripting name: cyclic_analytic_seqv_max class InputsCyclicAnalyticSeqvMax(_Inputs): - """Intermediate class used to connect user inputs to cyclic_analytic_seqv_max operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.cyclic_analytic_seqv_max() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_cyclic_support = dpf.CyclicSupport() - >>> op.inputs.cyclic_support.connect(my_cyclic_support) + """Intermediate class used to connect user inputs to + cyclic_analytic_seqv_max operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.cyclic_analytic_seqv_max() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_cyclic_support = dpf.CyclicSupport() + >>> op.inputs.cyclic_support.connect(my_cyclic_support) """ + def __init__(self, op: Operator): super().__init__(cyclic_analytic_seqv_max._spec().inputs, op) - self._time_scoping = Input(cyclic_analytic_seqv_max._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + cyclic_analytic_seqv_max._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(cyclic_analytic_seqv_max._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + cyclic_analytic_seqv_max._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(cyclic_analytic_seqv_max._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + cyclic_analytic_seqv_max._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._bool_rotate_to_global = Input(cyclic_analytic_seqv_max._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + cyclic_analytic_seqv_max._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._cyclic_support = Input(cyclic_analytic_seqv_max._spec().input_pin(16), 16, op, -1) + self._cyclic_support = Input( + cyclic_analytic_seqv_max._spec().input_pin(16), 16, op, -1 + ) self._inputs.append(self._cyclic_support) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows 
to connect time_scoping input to the operator. Parameters ---------- - my_time_scoping : Scoping, list, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_analytic_seqv_max() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, list, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_analytic_seqv_max() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: field container with the base and duplicate sectors + Field container with the base and duplicate + sectors Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_analytic_seqv_max() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: default is true + Default is true Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_analytic_seqv_max() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def cyclic_support(self): - """Allows to connect cyclic_support input to the operator + """Allows to connect cyclic_support input to the operator. Parameters ---------- - my_cyclic_support : CyclicSupport, + my_cyclic_support : CyclicSupport Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_analytic_seqv_max() >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> #or + >>> # or >>> op.inputs.cyclic_support(my_cyclic_support) - """ return self._cyclic_support + class OutputsCyclicAnalyticSeqvMax(_Outputs): - """Intermediate class used to get outputs from cyclic_analytic_seqv_max operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.cyclic_analytic_seqv_max() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + cyclic_analytic_seqv_max operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.cyclic_analytic_seqv_max() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(cyclic_analytic_seqv_max._spec().outputs, op) - self._fields_container = Output(cyclic_analytic_seqv_max._spec().output_pin(0), 0, op) + self._fields_container = Output( + cyclic_analytic_seqv_max._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: FieldsContainer filled in - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_analytic_seqv_max() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/cyclic_analytic_usum_max.py b/ansys/dpf/core/operators/result/cyclic_analytic_usum_max.py index 52d61663764..2d0b679ef29 100644 --- a/ansys/dpf/core/operators/result/cyclic_analytic_usum_max.py +++ b/ansys/dpf/core/operators/result/cyclic_analytic_usum_max.py @@ -1,84 +1,152 @@ """ cyclic_analytic_usum_max -======================== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "result" category -""" class cyclic_analytic_usum_max(Operator): - """Compute the maximum of the total deformation that can be expected on 360 degrees - - available inputs: - - time_scoping (Scoping, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping, list) (optional) - - fields_container (FieldsContainer) - - bool_rotate_to_global (bool) (optional) - - cyclic_support (CyclicSupport) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.cyclic_analytic_usum_max() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_cyclic_support = dpf.CyclicSupport() - >>> op.inputs.cyclic_support.connect(my_cyclic_support) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.cyclic_analytic_usum_max(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,fields_container=my_fields_container,bool_rotate_to_global=my_bool_rotate_to_global,cyclic_support=my_cyclic_support) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, 
mesh_scoping=None, fields_container=None, bool_rotate_to_global=None, cyclic_support=None, config=None, server=None): - super().__init__(name="cyclic_analytic_usum_max", config = config, server = server) + """Compute the maximum of the total deformation that can be expected on + 360 degrees + + Parameters + ---------- + time_scoping : Scoping, optional + mesh_scoping : ScopingsContainer or Scoping, optional + fields_container : FieldsContainer + Field container with the base and duplicate + sectors + bool_rotate_to_global : bool, optional + Default is true + cyclic_support : CyclicSupport + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.cyclic_analytic_usum_max() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_cyclic_support = dpf.CyclicSupport() + >>> op.inputs.cyclic_support.connect(my_cyclic_support) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.cyclic_analytic_usum_max( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... cyclic_support=my_cyclic_support, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + bool_rotate_to_global=None, + cyclic_support=None, + config=None, + server=None, + ): + super().__init__(name="cyclic_analytic_usum_max", config=config, server=server) self._inputs = InputsCyclicAnalyticUsumMax(self) self._outputs = OutputsCyclicAnalyticUsumMax(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if bool_rotate_to_global !=None: + if bool_rotate_to_global is not None: self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) - if cyclic_support !=None: + if cyclic_support is not None: self.inputs.cyclic_support.connect(cyclic_support) @staticmethod def _spec(): - spec = Specification(description="""Compute the maximum of the total deformation that can be expected on 360 degrees""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","vector"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping","vector"], optional=True, document=""""""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""field container with the base and duplicate sectors"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""default is true"""), - 16 : PinSpecification(name = "cyclic_support", type_names=["cyclic_support"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, 
document="""FieldsContainer filled in""")}) + description = """Compute the maximum of the total deformation that can be expected on + 360 degrees""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping", "vector"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Field container with the base and duplicate + sectors""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""Default is true""", + ), + 16: PinSpecification( + name="cyclic_support", + type_names=["cyclic_support"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Fieldscontainer filled in""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "cyclic_analytic_usum_max") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="cyclic_analytic_usum_max", server=server) @property def inputs(self): @@ -86,193 +154,192 @@ def inputs(self): Returns -------- - inputs : InputsCyclicAnalyticUsumMax + inputs : InputsCyclicAnalyticUsumMax """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCyclicAnalyticUsumMax + outputs : OutputsCyclicAnalyticUsumMax """ return super().outputs -#internal name: cyclic_analytic_usum_max -#scripting name: cyclic_analytic_usum_max class InputsCyclicAnalyticUsumMax(_Inputs): - """Intermediate class used to connect user inputs to cyclic_analytic_usum_max operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.cyclic_analytic_usum_max() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_cyclic_support = dpf.CyclicSupport() - >>> op.inputs.cyclic_support.connect(my_cyclic_support) + """Intermediate class used to connect user inputs to + cyclic_analytic_usum_max operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.cyclic_analytic_usum_max() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_cyclic_support = dpf.CyclicSupport() + >>> op.inputs.cyclic_support.connect(my_cyclic_support) """ + def __init__(self, op: Operator): super().__init__(cyclic_analytic_usum_max._spec().inputs, op) - self._time_scoping = Input(cyclic_analytic_usum_max._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + cyclic_analytic_usum_max._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(cyclic_analytic_usum_max._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + cyclic_analytic_usum_max._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(cyclic_analytic_usum_max._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + cyclic_analytic_usum_max._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._bool_rotate_to_global = Input(cyclic_analytic_usum_max._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + cyclic_analytic_usum_max._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._cyclic_support = Input(cyclic_analytic_usum_max._spec().input_pin(16), 16, op, -1) + self._cyclic_support = Input( + cyclic_analytic_usum_max._spec().input_pin(16), 16, op, -1 + ) self._inputs.append(self._cyclic_support) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows 
to connect time_scoping input to the operator. Parameters ---------- - my_time_scoping : Scoping, list, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_analytic_usum_max() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, list, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_analytic_usum_max() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: field container with the base and duplicate sectors + Field container with the base and duplicate + sectors Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_analytic_usum_max() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: default is true + Default is true Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_analytic_usum_max() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def cyclic_support(self): - """Allows to connect cyclic_support input to the operator + """Allows to connect cyclic_support input to the operator. Parameters ---------- - my_cyclic_support : CyclicSupport, + my_cyclic_support : CyclicSupport Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_analytic_usum_max() >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> #or + >>> # or >>> op.inputs.cyclic_support(my_cyclic_support) - """ return self._cyclic_support + class OutputsCyclicAnalyticUsumMax(_Outputs): - """Intermediate class used to get outputs from cyclic_analytic_usum_max operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.cyclic_analytic_usum_max() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + cyclic_analytic_usum_max operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.cyclic_analytic_usum_max() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(cyclic_analytic_usum_max._spec().outputs, op) - self._fields_container = Output(cyclic_analytic_usum_max._spec().output_pin(0), 0, op) + self._fields_container = Output( + cyclic_analytic_usum_max._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: FieldsContainer filled in - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_analytic_usum_max() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/cyclic_expanded_acceleration.py b/ansys/dpf/core/operators/result/cyclic_expanded_acceleration.py index d32c06a8fbf..00c7526c7b7 100644 --- a/ansys/dpf/core/operators/result/cyclic_expanded_acceleration.py +++ b/ansys/dpf/core/operators/result/cyclic_expanded_acceleration.py @@ -1,135 +1,283 @@ """ cyclic_expanded_acceleration -============================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class cyclic_expanded_acceleration(Operator): """Read acceleration from an rst file and expand it with cyclic symmetry. 
- available inputs: - - time_scoping (Scoping, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping, list) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - sector_mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - expanded_meshed_region (MeshedRegion, MeshesContainer) (optional) - - cyclic_support (CyclicSupport) (optional) - - sectors_to_expand (list, Scoping, ScopingsContainer) (optional) - - phi (float) (optional) - - available outputs: - - fields_container (FieldsContainer) - - expanded_meshes (MeshesContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.cyclic_expanded_acceleration() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_sector_mesh = dpf.MeshedRegion() - >>> op.inputs.sector_mesh.connect(my_sector_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> my_expanded_meshed_region = dpf.MeshedRegion() - >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> 
my_cyclic_support = dpf.CyclicSupport() - >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> my_sectors_to_expand = dpf.list() - >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) - >>> my_phi = float() - >>> op.inputs.phi.connect(my_phi) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.cyclic_expanded_acceleration(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,fields_container=my_fields_container,streams_container=my_streams_container,data_sources=my_data_sources,bool_rotate_to_global=my_bool_rotate_to_global,sector_mesh=my_sector_mesh,requested_location=my_requested_location,read_cyclic=my_read_cyclic,expanded_meshed_region=my_expanded_meshed_region,cyclic_support=my_cyclic_support,sectors_to_expand=my_sectors_to_expand,phi=my_phi) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container() - >>> result_expanded_meshes = op.outputs.expanded_meshes()""" - def __init__(self, time_scoping=None, mesh_scoping=None, fields_container=None, streams_container=None, data_sources=None, bool_rotate_to_global=None, sector_mesh=None, requested_location=None, read_cyclic=None, expanded_meshed_region=None, cyclic_support=None, sectors_to_expand=None, phi=None, config=None, server=None): - super().__init__(name="mapdl::rst::A_cyclic", config = config, server = server) + Parameters + ---------- + time_scoping : Scoping, optional + mesh_scoping : ScopingsContainer or Scoping, optional + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer or Stream, optional + Streams containing the result file. + data_sources : DataSources + Data sources containing the result file. + bool_rotate_to_global : bool, optional + Default is true + sector_mesh : MeshedRegion or MeshesContainer, optional + Mesh of the base sector (can be a skin). 
+ requested_location : str, optional + Location needed in output + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + expanded_meshed_region : MeshedRegion or MeshesContainer, optional + Mesh expanded. + cyclic_support : CyclicSupport, optional + sectors_to_expand : Scoping or ScopingsContainer, optional + Sectors to expand (start at 0), for + multistage: use scopings container + with 'stage' label. + phi : float, optional + Angle phi (default value 0.0) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.cyclic_expanded_acceleration() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_sector_mesh = dpf.MeshedRegion() + >>> op.inputs.sector_mesh.connect(my_sector_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_expanded_meshed_region = dpf.MeshedRegion() + >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) + >>> my_cyclic_support = dpf.CyclicSupport() + >>> op.inputs.cyclic_support.connect(my_cyclic_support) + >>> my_sectors_to_expand = dpf.Scoping() + >>> 
op.inputs.sectors_to_expand.connect(my_sectors_to_expand) + >>> my_phi = float() + >>> op.inputs.phi.connect(my_phi) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.cyclic_expanded_acceleration( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... sector_mesh=my_sector_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... expanded_meshed_region=my_expanded_meshed_region, + ... cyclic_support=my_cyclic_support, + ... sectors_to_expand=my_sectors_to_expand, + ... phi=my_phi, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + >>> result_expanded_meshes = op.outputs.expanded_meshes() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + sector_mesh=None, + requested_location=None, + read_cyclic=None, + expanded_meshed_region=None, + cyclic_support=None, + sectors_to_expand=None, + phi=None, + config=None, + server=None, + ): + super().__init__(name="mapdl::rst::A_cyclic", config=config, server=server) self._inputs = InputsCyclicExpandedAcceleration(self) self._outputs = OutputsCyclicExpandedAcceleration(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: 
self.inputs.data_sources.connect(data_sources) - if bool_rotate_to_global !=None: + if bool_rotate_to_global is not None: self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) - if sector_mesh !=None: + if sector_mesh is not None: self.inputs.sector_mesh.connect(sector_mesh) - if requested_location !=None: + if requested_location is not None: self.inputs.requested_location.connect(requested_location) - if read_cyclic !=None: + if read_cyclic is not None: self.inputs.read_cyclic.connect(read_cyclic) - if expanded_meshed_region !=None: + if expanded_meshed_region is not None: self.inputs.expanded_meshed_region.connect(expanded_meshed_region) - if cyclic_support !=None: + if cyclic_support is not None: self.inputs.cyclic_support.connect(cyclic_support) - if sectors_to_expand !=None: + if sectors_to_expand is not None: self.inputs.sectors_to_expand.connect(sectors_to_expand) - if phi !=None: + if phi is not None: self.inputs.phi.connect(phi) @staticmethod def _spec(): - spec = Specification(description="""Read acceleration from an rst file and expand it with cyclic symmetry.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","vector"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping","vector"], optional=True, document=""""""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], optional=True, document="""Streams containing the result file."""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""data sources containing the result file."""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""default is true"""), - 7 : PinSpecification(name 
= "sector_mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""mesh of the base sector (can be a skin)."""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""location needed in output"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)"""), - 15 : PinSpecification(name = "expanded_meshed_region", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""mesh expanded."""), - 16 : PinSpecification(name = "cyclic_support", type_names=["cyclic_support"], optional=True, document=""""""), - 18 : PinSpecification(name = "sectors_to_expand", type_names=["vector","scoping","scopings_container"], optional=True, document="""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label."""), - 19 : PinSpecification(name = "phi", type_names=["double"], optional=True, document="""angle phi (default value 0.0)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""FieldsContainer filled in"""), - 1 : PinSpecification(name = "expanded_meshes", type_names=["meshes_container"], optional=False, document="""""")}) + description = ( + """Read acceleration from an rst file and expand it with cyclic symmetry.""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping", "vector"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + 
document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container", "stream"], + optional=True, + document="""Streams containing the result file.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Data sources containing the result file.""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""Default is true""", + ), + 7: PinSpecification( + name="sector_mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Mesh of the base sector (can be a skin).""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Location needed in output""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 15: PinSpecification( + name="expanded_meshed_region", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Mesh expanded.""", + ), + 16: PinSpecification( + name="cyclic_support", + type_names=["cyclic_support"], + optional=True, + document="""""", + ), + 18: PinSpecification( + name="sectors_to_expand", + type_names=["vector", "scoping", "scopings_container"], + optional=True, + document="""Sectors to expand (start at 0), for + multistage: use scopings container + with 'stage' label.""", + ), + 19: PinSpecification( + name="phi", + type_names=["double"], + optional=True, + document="""Angle phi (default value 0.0)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + 
document="""Fieldscontainer filled in""", + ), + 1: PinSpecification( + name="expanded_meshes", + type_names=["meshes_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mapdl::rst::A_cyclic") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="mapdl::rst::A_cyclic", server=server) @property def inputs(self): @@ -137,423 +285,428 @@ def inputs(self): Returns -------- - inputs : InputsCyclicExpandedAcceleration + inputs : InputsCyclicExpandedAcceleration """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCyclicExpandedAcceleration + outputs : OutputsCyclicExpandedAcceleration """ return super().outputs -#internal name: mapdl::rst::A_cyclic -#scripting name: cyclic_expanded_acceleration class InputsCyclicExpandedAcceleration(_Inputs): - """Intermediate class used to connect user inputs to cyclic_expanded_acceleration operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.cyclic_expanded_acceleration() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> 
op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_sector_mesh = dpf.MeshedRegion() - >>> op.inputs.sector_mesh.connect(my_sector_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> my_expanded_meshed_region = dpf.MeshedRegion() - >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> my_cyclic_support = dpf.CyclicSupport() - >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> my_sectors_to_expand = dpf.list() - >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) - >>> my_phi = float() - >>> op.inputs.phi.connect(my_phi) + """Intermediate class used to connect user inputs to + cyclic_expanded_acceleration operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.cyclic_expanded_acceleration() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_sector_mesh = dpf.MeshedRegion() + >>> op.inputs.sector_mesh.connect(my_sector_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_expanded_meshed_region = dpf.MeshedRegion() + >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) + >>> my_cyclic_support = dpf.CyclicSupport() + >>> op.inputs.cyclic_support.connect(my_cyclic_support) + >>> my_sectors_to_expand = dpf.Scoping() + >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) + >>> my_phi = float() + >>> op.inputs.phi.connect(my_phi) """ + def __init__(self, op: Operator): super().__init__(cyclic_expanded_acceleration._spec().inputs, op) - self._time_scoping = Input(cyclic_expanded_acceleration._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + cyclic_expanded_acceleration._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(cyclic_expanded_acceleration._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + cyclic_expanded_acceleration._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - 
self._fields_container = Input(cyclic_expanded_acceleration._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + cyclic_expanded_acceleration._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(cyclic_expanded_acceleration._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + cyclic_expanded_acceleration._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(cyclic_expanded_acceleration._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + cyclic_expanded_acceleration._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(cyclic_expanded_acceleration._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + cyclic_expanded_acceleration._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._sector_mesh = Input(cyclic_expanded_acceleration._spec().input_pin(7), 7, op, -1) + self._sector_mesh = Input( + cyclic_expanded_acceleration._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._sector_mesh) - self._requested_location = Input(cyclic_expanded_acceleration._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + cyclic_expanded_acceleration._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(cyclic_expanded_acceleration._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + cyclic_expanded_acceleration._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input(cyclic_expanded_acceleration._spec().input_pin(15), 15, op, -1) + self._expanded_meshed_region = Input( + cyclic_expanded_acceleration._spec().input_pin(15), 15, op, -1 + ) self._inputs.append(self._expanded_meshed_region) - self._cyclic_support = 
Input(cyclic_expanded_acceleration._spec().input_pin(16), 16, op, -1) + self._cyclic_support = Input( + cyclic_expanded_acceleration._spec().input_pin(16), 16, op, -1 + ) self._inputs.append(self._cyclic_support) - self._sectors_to_expand = Input(cyclic_expanded_acceleration._spec().input_pin(18), 18, op, -1) + self._sectors_to_expand = Input( + cyclic_expanded_acceleration._spec().input_pin(18), 18, op, -1 + ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(cyclic_expanded_acceleration._spec().input_pin(19), 19, op, -1) + self._phi = Input( + cyclic_expanded_acceleration._spec().input_pin(19), 19, op, -1 + ) self._inputs.append(self._phi) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. Parameters ---------- - my_time_scoping : Scoping, list, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_acceleration() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, list, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_acceleration() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_acceleration() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: Streams containing the result file. + Streams containing the result file. Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_acceleration() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: data sources containing the result file. + Data sources containing the result file. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_acceleration() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: default is true + Default is true Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_acceleration() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def sector_mesh(self): - """Allows to connect sector_mesh input to the operator + """Allows to connect sector_mesh input to the operator. - - pindoc: mesh of the base sector (can be a skin). + Mesh of the base sector (can be a skin). Parameters ---------- - my_sector_mesh : MeshedRegion, MeshesContainer, + my_sector_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_acceleration() >>> op.inputs.sector_mesh.connect(my_sector_mesh) - >>> #or + >>> # or >>> op.inputs.sector_mesh(my_sector_mesh) - """ return self._sector_mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: location needed in output + Location needed in output Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_acceleration() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_acceleration() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic @property def expanded_meshed_region(self): - """Allows to connect expanded_meshed_region input to the operator + """Allows to connect expanded_meshed_region input to the operator. - - pindoc: mesh expanded. + Mesh expanded. Parameters ---------- - my_expanded_meshed_region : MeshedRegion, MeshesContainer, + my_expanded_meshed_region : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_acceleration() >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> #or + >>> # or >>> op.inputs.expanded_meshed_region(my_expanded_meshed_region) - """ return self._expanded_meshed_region @property def cyclic_support(self): - """Allows to connect cyclic_support input to the operator + """Allows to connect cyclic_support input to the operator. 
Parameters ---------- - my_cyclic_support : CyclicSupport, + my_cyclic_support : CyclicSupport Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_acceleration() >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> #or + >>> # or >>> op.inputs.cyclic_support(my_cyclic_support) - """ return self._cyclic_support @property def sectors_to_expand(self): - """Allows to connect sectors_to_expand input to the operator + """Allows to connect sectors_to_expand input to the operator. - - pindoc: sectors to expand (start at 0), for multistage: use scopings container with 'stage' label. + Sectors to expand (start at 0), for + multistage: use scopings container + with 'stage' label. Parameters ---------- - my_sectors_to_expand : list, Scoping, ScopingsContainer, + my_sectors_to_expand : Scoping or ScopingsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_acceleration() >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) - >>> #or + >>> # or >>> op.inputs.sectors_to_expand(my_sectors_to_expand) - """ return self._sectors_to_expand @property def phi(self): - """Allows to connect phi input to the operator + """Allows to connect phi input to the operator. - - pindoc: angle phi (default value 0.0) + Angle phi (default value 0.0) Parameters ---------- - my_phi : float, + my_phi : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_acceleration() >>> op.inputs.phi.connect(my_phi) - >>> #or + >>> # or >>> op.inputs.phi(my_phi) - """ return self._phi + class OutputsCyclicExpandedAcceleration(_Outputs): - """Intermediate class used to get outputs from cyclic_expanded_acceleration operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.cyclic_expanded_acceleration() - >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - >>> result_expanded_meshes = op.outputs.expanded_meshes() + """Intermediate class used to get outputs from + cyclic_expanded_acceleration operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.cyclic_expanded_acceleration() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + >>> result_expanded_meshes = op.outputs.expanded_meshes() """ + def __init__(self, op: Operator): super().__init__(cyclic_expanded_acceleration._spec().outputs, op) - self._fields_container = Output(cyclic_expanded_acceleration._spec().output_pin(0), 0, op) + self._fields_container = Output( + cyclic_expanded_acceleration._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) - self._expanded_meshes = Output(cyclic_expanded_acceleration._spec().output_pin(1), 1, op) + self._expanded_meshes = Output( + cyclic_expanded_acceleration._spec().output_pin(1), 1, op + ) self._outputs.append(self._expanded_meshes) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: FieldsContainer filled in - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_acceleration() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container @property def expanded_meshes(self): """Allows to get expanded_meshes output of the operator - Returns ---------- - my_expanded_meshes : MeshesContainer, + my_expanded_meshes : MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_acceleration() >>> # Connect inputs : op.inputs. 
... - >>> result_expanded_meshes = op.outputs.expanded_meshes() - """ + >>> result_expanded_meshes = op.outputs.expanded_meshes() + """ # noqa: E501 return self._expanded_meshes - diff --git a/ansys/dpf/core/operators/result/cyclic_expanded_displacement.py b/ansys/dpf/core/operators/result/cyclic_expanded_displacement.py index fd0669644a8..f0cf99ce04d 100644 --- a/ansys/dpf/core/operators/result/cyclic_expanded_displacement.py +++ b/ansys/dpf/core/operators/result/cyclic_expanded_displacement.py @@ -1,135 +1,283 @@ """ cyclic_expanded_displacement -============================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class cyclic_expanded_displacement(Operator): - """Read displacements from an rst file and expand it with cyclic symmetry. 
- - available inputs: - - time_scoping (Scoping, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping, list) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - sector_mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - expanded_meshed_region (MeshedRegion, MeshesContainer) (optional) - - cyclic_support (CyclicSupport) (optional) - - sectors_to_expand (list, Scoping, ScopingsContainer) (optional) - - phi (float) (optional) - - available outputs: - - fields_container (FieldsContainer) - - expanded_meshes (MeshesContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.cyclic_expanded_displacement() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_sector_mesh = dpf.MeshedRegion() - >>> op.inputs.sector_mesh.connect(my_sector_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> my_expanded_meshed_region = dpf.MeshedRegion() - >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> 
my_cyclic_support = dpf.CyclicSupport() - >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> my_sectors_to_expand = dpf.list() - >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) - >>> my_phi = float() - >>> op.inputs.phi.connect(my_phi) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.cyclic_expanded_displacement(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,fields_container=my_fields_container,streams_container=my_streams_container,data_sources=my_data_sources,bool_rotate_to_global=my_bool_rotate_to_global,sector_mesh=my_sector_mesh,requested_location=my_requested_location,read_cyclic=my_read_cyclic,expanded_meshed_region=my_expanded_meshed_region,cyclic_support=my_cyclic_support,sectors_to_expand=my_sectors_to_expand,phi=my_phi) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container() - >>> result_expanded_meshes = op.outputs.expanded_meshes()""" - def __init__(self, time_scoping=None, mesh_scoping=None, fields_container=None, streams_container=None, data_sources=None, bool_rotate_to_global=None, sector_mesh=None, requested_location=None, read_cyclic=None, expanded_meshed_region=None, cyclic_support=None, sectors_to_expand=None, phi=None, config=None, server=None): - super().__init__(name="mapdl::rst::U_cyclic", config = config, server = server) + """Read displacements from an rst file and expand it with cyclic + symmetry. + + Parameters + ---------- + time_scoping : Scoping, optional + mesh_scoping : ScopingsContainer or Scoping, optional + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer or Stream, optional + Streams containing the result file. + data_sources : DataSources + Data sources containing the result file. 
+ bool_rotate_to_global : bool, optional + Default is true + sector_mesh : MeshedRegion or MeshesContainer, optional + Mesh of the base sector (can be a skin). + requested_location : str, optional + Location needed in output + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + expanded_meshed_region : MeshedRegion or MeshesContainer, optional + Mesh expanded. + cyclic_support : CyclicSupport, optional + sectors_to_expand : Scoping or ScopingsContainer, optional + Sectors to expand (start at 0), for + multistage: use scopings container + with 'stage' label. + phi : float, optional + Angle phi (default value 0.0) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.cyclic_expanded_displacement() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_sector_mesh = dpf.MeshedRegion() + >>> op.inputs.sector_mesh.connect(my_sector_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_expanded_meshed_region = dpf.MeshedRegion() + >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) + >>> 
my_cyclic_support = dpf.CyclicSupport() + >>> op.inputs.cyclic_support.connect(my_cyclic_support) + >>> my_sectors_to_expand = dpf.Scoping() + >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) + >>> my_phi = float() + >>> op.inputs.phi.connect(my_phi) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.cyclic_expanded_displacement( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... sector_mesh=my_sector_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... expanded_meshed_region=my_expanded_meshed_region, + ... cyclic_support=my_cyclic_support, + ... sectors_to_expand=my_sectors_to_expand, + ... phi=my_phi, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + >>> result_expanded_meshes = op.outputs.expanded_meshes() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + sector_mesh=None, + requested_location=None, + read_cyclic=None, + expanded_meshed_region=None, + cyclic_support=None, + sectors_to_expand=None, + phi=None, + config=None, + server=None, + ): + super().__init__(name="mapdl::rst::U_cyclic", config=config, server=server) self._inputs = InputsCyclicExpandedDisplacement(self) self._outputs = OutputsCyclicExpandedDisplacement(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container !=None: + if 
streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if bool_rotate_to_global !=None: + if bool_rotate_to_global is not None: self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) - if sector_mesh !=None: + if sector_mesh is not None: self.inputs.sector_mesh.connect(sector_mesh) - if requested_location !=None: + if requested_location is not None: self.inputs.requested_location.connect(requested_location) - if read_cyclic !=None: + if read_cyclic is not None: self.inputs.read_cyclic.connect(read_cyclic) - if expanded_meshed_region !=None: + if expanded_meshed_region is not None: self.inputs.expanded_meshed_region.connect(expanded_meshed_region) - if cyclic_support !=None: + if cyclic_support is not None: self.inputs.cyclic_support.connect(cyclic_support) - if sectors_to_expand !=None: + if sectors_to_expand is not None: self.inputs.sectors_to_expand.connect(sectors_to_expand) - if phi !=None: + if phi is not None: self.inputs.phi.connect(phi) @staticmethod def _spec(): - spec = Specification(description="""Read displacements from an rst file and expand it with cyclic symmetry.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","vector"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping","vector"], optional=True, document=""""""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], optional=True, document="""Streams containing the result file."""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""data sources containing the result file."""), - 5 : 
PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""default is true"""), - 7 : PinSpecification(name = "sector_mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""mesh of the base sector (can be a skin)."""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""location needed in output"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)"""), - 15 : PinSpecification(name = "expanded_meshed_region", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""mesh expanded."""), - 16 : PinSpecification(name = "cyclic_support", type_names=["cyclic_support"], optional=True, document=""""""), - 18 : PinSpecification(name = "sectors_to_expand", type_names=["vector","scoping","scopings_container"], optional=True, document="""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label."""), - 19 : PinSpecification(name = "phi", type_names=["double"], optional=True, document="""angle phi (default value 0.0)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""FieldsContainer filled in"""), - 1 : PinSpecification(name = "expanded_meshes", type_names=["meshes_container"], optional=False, document="""""")}) + description = """Read displacements from an rst file and expand it with cyclic + symmetry.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping", "vector"], + 
optional=True, + document="""""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container", "stream"], + optional=True, + document="""Streams containing the result file.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Data sources containing the result file.""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""Default is true""", + ), + 7: PinSpecification( + name="sector_mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Mesh of the base sector (can be a skin).""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Location needed in output""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 15: PinSpecification( + name="expanded_meshed_region", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Mesh expanded.""", + ), + 16: PinSpecification( + name="cyclic_support", + type_names=["cyclic_support"], + optional=True, + document="""""", + ), + 18: PinSpecification( + name="sectors_to_expand", + type_names=["vector", "scoping", "scopings_container"], + optional=True, + document="""Sectors to expand (start at 0), for + multistage: use scopings container + with 'stage' label.""", + ), + 19: PinSpecification( + name="phi", + type_names=["double"], + optional=True, + document="""Angle phi (default value 0.0)""", + 
), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Fieldscontainer filled in""", + ), + 1: PinSpecification( + name="expanded_meshes", + type_names=["meshes_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mapdl::rst::U_cyclic") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="mapdl::rst::U_cyclic", server=server) @property def inputs(self): @@ -137,423 +285,428 @@ def inputs(self): Returns -------- - inputs : InputsCyclicExpandedDisplacement + inputs : InputsCyclicExpandedDisplacement """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCyclicExpandedDisplacement + outputs : OutputsCyclicExpandedDisplacement """ return super().outputs -#internal name: mapdl::rst::U_cyclic -#scripting name: cyclic_expanded_displacement class InputsCyclicExpandedDisplacement(_Inputs): - """Intermediate class used to connect user inputs to cyclic_expanded_displacement operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.cyclic_expanded_displacement() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = 
dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_sector_mesh = dpf.MeshedRegion() - >>> op.inputs.sector_mesh.connect(my_sector_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> my_expanded_meshed_region = dpf.MeshedRegion() - >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> my_cyclic_support = dpf.CyclicSupport() - >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> my_sectors_to_expand = dpf.list() - >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) - >>> my_phi = float() - >>> op.inputs.phi.connect(my_phi) + """Intermediate class used to connect user inputs to + cyclic_expanded_displacement operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.cyclic_expanded_displacement() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_sector_mesh = dpf.MeshedRegion() + >>> op.inputs.sector_mesh.connect(my_sector_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_expanded_meshed_region = dpf.MeshedRegion() + >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) + >>> my_cyclic_support = dpf.CyclicSupport() + >>> op.inputs.cyclic_support.connect(my_cyclic_support) + >>> my_sectors_to_expand = dpf.Scoping() + >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) + >>> my_phi = float() + >>> op.inputs.phi.connect(my_phi) """ + def __init__(self, op: Operator): super().__init__(cyclic_expanded_displacement._spec().inputs, op) - self._time_scoping = Input(cyclic_expanded_displacement._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + cyclic_expanded_displacement._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(cyclic_expanded_displacement._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + cyclic_expanded_displacement._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - 
self._fields_container = Input(cyclic_expanded_displacement._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + cyclic_expanded_displacement._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(cyclic_expanded_displacement._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + cyclic_expanded_displacement._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(cyclic_expanded_displacement._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + cyclic_expanded_displacement._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(cyclic_expanded_displacement._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + cyclic_expanded_displacement._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._sector_mesh = Input(cyclic_expanded_displacement._spec().input_pin(7), 7, op, -1) + self._sector_mesh = Input( + cyclic_expanded_displacement._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._sector_mesh) - self._requested_location = Input(cyclic_expanded_displacement._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + cyclic_expanded_displacement._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(cyclic_expanded_displacement._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + cyclic_expanded_displacement._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input(cyclic_expanded_displacement._spec().input_pin(15), 15, op, -1) + self._expanded_meshed_region = Input( + cyclic_expanded_displacement._spec().input_pin(15), 15, op, -1 + ) self._inputs.append(self._expanded_meshed_region) - self._cyclic_support = 
Input(cyclic_expanded_displacement._spec().input_pin(16), 16, op, -1) + self._cyclic_support = Input( + cyclic_expanded_displacement._spec().input_pin(16), 16, op, -1 + ) self._inputs.append(self._cyclic_support) - self._sectors_to_expand = Input(cyclic_expanded_displacement._spec().input_pin(18), 18, op, -1) + self._sectors_to_expand = Input( + cyclic_expanded_displacement._spec().input_pin(18), 18, op, -1 + ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(cyclic_expanded_displacement._spec().input_pin(19), 19, op, -1) + self._phi = Input( + cyclic_expanded_displacement._spec().input_pin(19), 19, op, -1 + ) self._inputs.append(self._phi) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. Parameters ---------- - my_time_scoping : Scoping, list, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_displacement() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, list, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_displacement() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_displacement() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: Streams containing the result file. + Streams containing the result file. Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_displacement() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: data sources containing the result file. + Data sources containing the result file. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_displacement() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: default is true + Default is true Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_displacement() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def sector_mesh(self): - """Allows to connect sector_mesh input to the operator + """Allows to connect sector_mesh input to the operator. - - pindoc: mesh of the base sector (can be a skin). + Mesh of the base sector (can be a skin). Parameters ---------- - my_sector_mesh : MeshedRegion, MeshesContainer, + my_sector_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_displacement() >>> op.inputs.sector_mesh.connect(my_sector_mesh) - >>> #or + >>> # or >>> op.inputs.sector_mesh(my_sector_mesh) - """ return self._sector_mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: location needed in output + Location needed in output Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_displacement() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_displacement() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic @property def expanded_meshed_region(self): - """Allows to connect expanded_meshed_region input to the operator + """Allows to connect expanded_meshed_region input to the operator. - - pindoc: mesh expanded. + Mesh expanded. Parameters ---------- - my_expanded_meshed_region : MeshedRegion, MeshesContainer, + my_expanded_meshed_region : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_displacement() >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> #or + >>> # or >>> op.inputs.expanded_meshed_region(my_expanded_meshed_region) - """ return self._expanded_meshed_region @property def cyclic_support(self): - """Allows to connect cyclic_support input to the operator + """Allows to connect cyclic_support input to the operator. 
Parameters ---------- - my_cyclic_support : CyclicSupport, + my_cyclic_support : CyclicSupport Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_displacement() >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> #or + >>> # or >>> op.inputs.cyclic_support(my_cyclic_support) - """ return self._cyclic_support @property def sectors_to_expand(self): - """Allows to connect sectors_to_expand input to the operator + """Allows to connect sectors_to_expand input to the operator. - - pindoc: sectors to expand (start at 0), for multistage: use scopings container with 'stage' label. + Sectors to expand (start at 0), for + multistage: use scopings container + with 'stage' label. Parameters ---------- - my_sectors_to_expand : list, Scoping, ScopingsContainer, + my_sectors_to_expand : Scoping or ScopingsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_displacement() >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) - >>> #or + >>> # or >>> op.inputs.sectors_to_expand(my_sectors_to_expand) - """ return self._sectors_to_expand @property def phi(self): - """Allows to connect phi input to the operator + """Allows to connect phi input to the operator. - - pindoc: angle phi (default value 0.0) + Angle phi (default value 0.0) Parameters ---------- - my_phi : float, + my_phi : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_displacement() >>> op.inputs.phi.connect(my_phi) - >>> #or + >>> # or >>> op.inputs.phi(my_phi) - """ return self._phi + class OutputsCyclicExpandedDisplacement(_Outputs): - """Intermediate class used to get outputs from cyclic_expanded_displacement operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.cyclic_expanded_displacement() - >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - >>> result_expanded_meshes = op.outputs.expanded_meshes() + """Intermediate class used to get outputs from + cyclic_expanded_displacement operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.cyclic_expanded_displacement() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + >>> result_expanded_meshes = op.outputs.expanded_meshes() """ + def __init__(self, op: Operator): super().__init__(cyclic_expanded_displacement._spec().outputs, op) - self._fields_container = Output(cyclic_expanded_displacement._spec().output_pin(0), 0, op) + self._fields_container = Output( + cyclic_expanded_displacement._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) - self._expanded_meshes = Output(cyclic_expanded_displacement._spec().output_pin(1), 1, op) + self._expanded_meshes = Output( + cyclic_expanded_displacement._spec().output_pin(1), 1, op + ) self._outputs.append(self._expanded_meshes) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: FieldsContainer filled in - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_displacement() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container @property def expanded_meshes(self): """Allows to get expanded_meshes output of the operator - Returns ---------- - my_expanded_meshes : MeshesContainer, + my_expanded_meshes : MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_displacement() >>> # Connect inputs : op.inputs. 
... - >>> result_expanded_meshes = op.outputs.expanded_meshes() - """ + >>> result_expanded_meshes = op.outputs.expanded_meshes() + """ # noqa: E501 return self._expanded_meshes - diff --git a/ansys/dpf/core/operators/result/cyclic_expanded_el_strain.py b/ansys/dpf/core/operators/result/cyclic_expanded_el_strain.py index 09c1bdb051a..6354701c463 100644 --- a/ansys/dpf/core/operators/result/cyclic_expanded_el_strain.py +++ b/ansys/dpf/core/operators/result/cyclic_expanded_el_strain.py @@ -1,135 +1,283 @@ """ cyclic_expanded_el_strain -========================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class cyclic_expanded_el_strain(Operator): - """Read mapdl::rst::EPEL from an rst file and expand it with cyclic symmetry. 
- - available inputs: - - time_scoping (Scoping, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping, list) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - sector_mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - expanded_meshed_region (MeshedRegion, MeshesContainer) (optional) - - cyclic_support (CyclicSupport) (optional) - - sectors_to_expand (list, Scoping, ScopingsContainer) (optional) - - phi (float) (optional) - - available outputs: - - fields_container (FieldsContainer) - - expanded_meshes (MeshesContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.cyclic_expanded_el_strain() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_sector_mesh = dpf.MeshedRegion() - >>> op.inputs.sector_mesh.connect(my_sector_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> my_expanded_meshed_region = dpf.MeshedRegion() - >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> 
my_cyclic_support = dpf.CyclicSupport() - >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> my_sectors_to_expand = dpf.list() - >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) - >>> my_phi = float() - >>> op.inputs.phi.connect(my_phi) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.cyclic_expanded_el_strain(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,fields_container=my_fields_container,streams_container=my_streams_container,data_sources=my_data_sources,bool_rotate_to_global=my_bool_rotate_to_global,sector_mesh=my_sector_mesh,requested_location=my_requested_location,read_cyclic=my_read_cyclic,expanded_meshed_region=my_expanded_meshed_region,cyclic_support=my_cyclic_support,sectors_to_expand=my_sectors_to_expand,phi=my_phi) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container() - >>> result_expanded_meshes = op.outputs.expanded_meshes()""" - def __init__(self, time_scoping=None, mesh_scoping=None, fields_container=None, streams_container=None, data_sources=None, bool_rotate_to_global=None, sector_mesh=None, requested_location=None, read_cyclic=None, expanded_meshed_region=None, cyclic_support=None, sectors_to_expand=None, phi=None, config=None, server=None): - super().__init__(name="mapdl::rst::EPEL_cyclic", config = config, server = server) + """Read mapdl::rst::EPEL from an rst file and expand it with cyclic + symmetry. + + Parameters + ---------- + time_scoping : Scoping, optional + mesh_scoping : ScopingsContainer or Scoping, optional + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer or Stream, optional + Streams containing the result file. + data_sources : DataSources + Data sources containing the result file. 
+ bool_rotate_to_global : bool, optional + Default is true + sector_mesh : MeshedRegion or MeshesContainer, optional + Mesh of the base sector (can be a skin). + requested_location : str, optional + Location needed in output + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + expanded_meshed_region : MeshedRegion or MeshesContainer, optional + Mesh expanded. + cyclic_support : CyclicSupport, optional + sectors_to_expand : Scoping or ScopingsContainer, optional + Sectors to expand (start at 0), for + multistage: use scopings container + with 'stage' label. + phi : float, optional + Phi angle (default value 0.0) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.cyclic_expanded_el_strain() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_sector_mesh = dpf.MeshedRegion() + >>> op.inputs.sector_mesh.connect(my_sector_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_expanded_meshed_region = dpf.MeshedRegion() + >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) + >>> 
my_cyclic_support = dpf.CyclicSupport() + >>> op.inputs.cyclic_support.connect(my_cyclic_support) + >>> my_sectors_to_expand = dpf.Scoping() + >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) + >>> my_phi = float() + >>> op.inputs.phi.connect(my_phi) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.cyclic_expanded_el_strain( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... sector_mesh=my_sector_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... expanded_meshed_region=my_expanded_meshed_region, + ... cyclic_support=my_cyclic_support, + ... sectors_to_expand=my_sectors_to_expand, + ... phi=my_phi, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + >>> result_expanded_meshes = op.outputs.expanded_meshes() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + sector_mesh=None, + requested_location=None, + read_cyclic=None, + expanded_meshed_region=None, + cyclic_support=None, + sectors_to_expand=None, + phi=None, + config=None, + server=None, + ): + super().__init__(name="mapdl::rst::EPEL_cyclic", config=config, server=server) self._inputs = InputsCyclicExpandedElStrain(self) self._outputs = OutputsCyclicExpandedElStrain(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container !=None: + if 
streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if bool_rotate_to_global !=None: + if bool_rotate_to_global is not None: self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) - if sector_mesh !=None: + if sector_mesh is not None: self.inputs.sector_mesh.connect(sector_mesh) - if requested_location !=None: + if requested_location is not None: self.inputs.requested_location.connect(requested_location) - if read_cyclic !=None: + if read_cyclic is not None: self.inputs.read_cyclic.connect(read_cyclic) - if expanded_meshed_region !=None: + if expanded_meshed_region is not None: self.inputs.expanded_meshed_region.connect(expanded_meshed_region) - if cyclic_support !=None: + if cyclic_support is not None: self.inputs.cyclic_support.connect(cyclic_support) - if sectors_to_expand !=None: + if sectors_to_expand is not None: self.inputs.sectors_to_expand.connect(sectors_to_expand) - if phi !=None: + if phi is not None: self.inputs.phi.connect(phi) @staticmethod def _spec(): - spec = Specification(description="""Read mapdl::rst::EPEL from an rst file and expand it with cyclic symmetry.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","vector"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping","vector"], optional=True, document=""""""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], optional=True, document="""Streams containing the result file."""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""data sources containing the result file."""), - 
5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""default is true"""), - 7 : PinSpecification(name = "sector_mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""mesh of the base sector (can be a skin)."""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""location needed in output"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)"""), - 15 : PinSpecification(name = "expanded_meshed_region", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""mesh expanded."""), - 16 : PinSpecification(name = "cyclic_support", type_names=["cyclic_support"], optional=True, document=""""""), - 18 : PinSpecification(name = "sectors_to_expand", type_names=["vector","scoping","scopings_container"], optional=True, document="""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label."""), - 19 : PinSpecification(name = "phi", type_names=["double"], optional=True, document="""phi angle (default value 0.0)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""FieldsContainer filled in"""), - 1 : PinSpecification(name = "expanded_meshes", type_names=["meshes_container"], optional=False, document="""""")}) + description = """Read mapdl::rst::EPEL from an rst file and expand it with cyclic + symmetry.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping", 
"vector"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container", "stream"], + optional=True, + document="""Streams containing the result file.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Data sources containing the result file.""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""Default is true""", + ), + 7: PinSpecification( + name="sector_mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Mesh of the base sector (can be a skin).""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Location needed in output""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 15: PinSpecification( + name="expanded_meshed_region", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Mesh expanded.""", + ), + 16: PinSpecification( + name="cyclic_support", + type_names=["cyclic_support"], + optional=True, + document="""""", + ), + 18: PinSpecification( + name="sectors_to_expand", + type_names=["vector", "scoping", "scopings_container"], + optional=True, + document="""Sectors to expand (start at 0), for + multistage: use scopings container + with 'stage' label.""", + ), + 19: PinSpecification( + name="phi", + type_names=["double"], + optional=True, + document="""Phi angle (default 
value 0.0)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Fieldscontainer filled in""", + ), + 1: PinSpecification( + name="expanded_meshes", + type_names=["meshes_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mapdl::rst::EPEL_cyclic") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="mapdl::rst::EPEL_cyclic", server=server) @property def inputs(self): @@ -137,423 +285,426 @@ def inputs(self): Returns -------- - inputs : InputsCyclicExpandedElStrain + inputs : InputsCyclicExpandedElStrain """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCyclicExpandedElStrain + outputs : OutputsCyclicExpandedElStrain """ return super().outputs -#internal name: mapdl::rst::EPEL_cyclic -#scripting name: cyclic_expanded_el_strain class InputsCyclicExpandedElStrain(_Inputs): - """Intermediate class used to connect user inputs to cyclic_expanded_el_strain operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.cyclic_expanded_el_strain() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = 
dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_sector_mesh = dpf.MeshedRegion() - >>> op.inputs.sector_mesh.connect(my_sector_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> my_expanded_meshed_region = dpf.MeshedRegion() - >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> my_cyclic_support = dpf.CyclicSupport() - >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> my_sectors_to_expand = dpf.list() - >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) - >>> my_phi = float() - >>> op.inputs.phi.connect(my_phi) + """Intermediate class used to connect user inputs to + cyclic_expanded_el_strain operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.cyclic_expanded_el_strain() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_sector_mesh = dpf.MeshedRegion() + >>> op.inputs.sector_mesh.connect(my_sector_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_expanded_meshed_region = dpf.MeshedRegion() + >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) + >>> my_cyclic_support = dpf.CyclicSupport() + >>> op.inputs.cyclic_support.connect(my_cyclic_support) + >>> my_sectors_to_expand = dpf.Scoping() + >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) + >>> my_phi = float() + >>> op.inputs.phi.connect(my_phi) """ + def __init__(self, op: Operator): super().__init__(cyclic_expanded_el_strain._spec().inputs, op) - self._time_scoping = Input(cyclic_expanded_el_strain._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + cyclic_expanded_el_strain._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(cyclic_expanded_el_strain._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + cyclic_expanded_el_strain._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container 
= Input(cyclic_expanded_el_strain._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + cyclic_expanded_el_strain._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(cyclic_expanded_el_strain._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + cyclic_expanded_el_strain._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(cyclic_expanded_el_strain._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + cyclic_expanded_el_strain._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(cyclic_expanded_el_strain._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + cyclic_expanded_el_strain._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._sector_mesh = Input(cyclic_expanded_el_strain._spec().input_pin(7), 7, op, -1) + self._sector_mesh = Input( + cyclic_expanded_el_strain._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._sector_mesh) - self._requested_location = Input(cyclic_expanded_el_strain._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + cyclic_expanded_el_strain._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(cyclic_expanded_el_strain._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + cyclic_expanded_el_strain._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input(cyclic_expanded_el_strain._spec().input_pin(15), 15, op, -1) + self._expanded_meshed_region = Input( + cyclic_expanded_el_strain._spec().input_pin(15), 15, op, -1 + ) self._inputs.append(self._expanded_meshed_region) - self._cyclic_support = Input(cyclic_expanded_el_strain._spec().input_pin(16), 16, op, -1) + self._cyclic_support = Input( + 
cyclic_expanded_el_strain._spec().input_pin(16), 16, op, -1 + ) self._inputs.append(self._cyclic_support) - self._sectors_to_expand = Input(cyclic_expanded_el_strain._spec().input_pin(18), 18, op, -1) + self._sectors_to_expand = Input( + cyclic_expanded_el_strain._spec().input_pin(18), 18, op, -1 + ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(cyclic_expanded_el_strain._spec().input_pin(19), 19, op, -1) + self._phi = Input(cyclic_expanded_el_strain._spec().input_pin(19), 19, op, -1) self._inputs.append(self._phi) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. Parameters ---------- - my_time_scoping : Scoping, list, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_el_strain() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, list, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_el_strain() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_el_strain() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: Streams containing the result file. + Streams containing the result file. Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_el_strain() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: data sources containing the result file. + Data sources containing the result file. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_el_strain() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: default is true + Default is true Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_el_strain() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def sector_mesh(self): - """Allows to connect sector_mesh input to the operator + """Allows to connect sector_mesh input to the operator. - - pindoc: mesh of the base sector (can be a skin). + Mesh of the base sector (can be a skin). Parameters ---------- - my_sector_mesh : MeshedRegion, MeshesContainer, + my_sector_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_el_strain() >>> op.inputs.sector_mesh.connect(my_sector_mesh) - >>> #or + >>> # or >>> op.inputs.sector_mesh(my_sector_mesh) - """ return self._sector_mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: location needed in output + Location needed in output Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_el_strain() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_el_strain() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic @property def expanded_meshed_region(self): - """Allows to connect expanded_meshed_region input to the operator + """Allows to connect expanded_meshed_region input to the operator. - - pindoc: mesh expanded. + Mesh expanded. Parameters ---------- - my_expanded_meshed_region : MeshedRegion, MeshesContainer, + my_expanded_meshed_region : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_el_strain() >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> #or + >>> # or >>> op.inputs.expanded_meshed_region(my_expanded_meshed_region) - """ return self._expanded_meshed_region @property def cyclic_support(self): - """Allows to connect cyclic_support input to the operator + """Allows to connect cyclic_support input to the operator. 
Parameters ---------- - my_cyclic_support : CyclicSupport, + my_cyclic_support : CyclicSupport Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_el_strain() >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> #or + >>> # or >>> op.inputs.cyclic_support(my_cyclic_support) - """ return self._cyclic_support @property def sectors_to_expand(self): - """Allows to connect sectors_to_expand input to the operator + """Allows to connect sectors_to_expand input to the operator. - - pindoc: sectors to expand (start at 0), for multistage: use scopings container with 'stage' label. + Sectors to expand (start at 0), for + multistage: use scopings container + with 'stage' label. Parameters ---------- - my_sectors_to_expand : list, Scoping, ScopingsContainer, + my_sectors_to_expand : Scoping or ScopingsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_el_strain() >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) - >>> #or + >>> # or >>> op.inputs.sectors_to_expand(my_sectors_to_expand) - """ return self._sectors_to_expand @property def phi(self): - """Allows to connect phi input to the operator + """Allows to connect phi input to the operator. - - pindoc: phi angle (default value 0.0) + Phi angle (default value 0.0) Parameters ---------- - my_phi : float, + my_phi : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_el_strain() >>> op.inputs.phi.connect(my_phi) - >>> #or + >>> # or >>> op.inputs.phi(my_phi) - """ return self._phi + class OutputsCyclicExpandedElStrain(_Outputs): - """Intermediate class used to get outputs from cyclic_expanded_el_strain operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.cyclic_expanded_el_strain() - >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - >>> result_expanded_meshes = op.outputs.expanded_meshes() + """Intermediate class used to get outputs from + cyclic_expanded_el_strain operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.cyclic_expanded_el_strain() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + >>> result_expanded_meshes = op.outputs.expanded_meshes() """ + def __init__(self, op: Operator): super().__init__(cyclic_expanded_el_strain._spec().outputs, op) - self._fields_container = Output(cyclic_expanded_el_strain._spec().output_pin(0), 0, op) + self._fields_container = Output( + cyclic_expanded_el_strain._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) - self._expanded_meshes = Output(cyclic_expanded_el_strain._spec().output_pin(1), 1, op) + self._expanded_meshes = Output( + cyclic_expanded_el_strain._spec().output_pin(1), 1, op + ) self._outputs.append(self._expanded_meshes) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: FieldsContainer filled in - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_el_strain() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container @property def expanded_meshes(self): """Allows to get expanded_meshes output of the operator - Returns ---------- - my_expanded_meshes : MeshesContainer, + my_expanded_meshes : MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_el_strain() >>> # Connect inputs : op.inputs. ... 
- >>> result_expanded_meshes = op.outputs.expanded_meshes() - """ + >>> result_expanded_meshes = op.outputs.expanded_meshes() + """ # noqa: E501 return self._expanded_meshes - diff --git a/ansys/dpf/core/operators/result/cyclic_expanded_enf.py b/ansys/dpf/core/operators/result/cyclic_expanded_enf.py index 9bc89718460..8a9bdb7cb43 100644 --- a/ansys/dpf/core/operators/result/cyclic_expanded_enf.py +++ b/ansys/dpf/core/operators/result/cyclic_expanded_enf.py @@ -1,135 +1,283 @@ """ cyclic_expanded_enf -=================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class cyclic_expanded_enf(Operator): """Read ENF from an rst file and expand it with cyclic symmetry. 
- available inputs: - - time_scoping (Scoping, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping, list) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - sector_mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - expanded_meshed_region (MeshedRegion, MeshesContainer) (optional) - - cyclic_support (CyclicSupport) (optional) - - sectors_to_expand (list, Scoping, ScopingsContainer) (optional) - - phi (float) (optional) - - available outputs: - - fields_container (FieldsContainer) - - expanded_meshes (MeshesContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.cyclic_expanded_enf() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_sector_mesh = dpf.MeshedRegion() - >>> op.inputs.sector_mesh.connect(my_sector_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> my_expanded_meshed_region = dpf.MeshedRegion() - >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> my_cyclic_support = 
dpf.CyclicSupport() - >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> my_sectors_to_expand = dpf.list() - >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) - >>> my_phi = float() - >>> op.inputs.phi.connect(my_phi) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.cyclic_expanded_enf(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,fields_container=my_fields_container,streams_container=my_streams_container,data_sources=my_data_sources,bool_rotate_to_global=my_bool_rotate_to_global,sector_mesh=my_sector_mesh,requested_location=my_requested_location,read_cyclic=my_read_cyclic,expanded_meshed_region=my_expanded_meshed_region,cyclic_support=my_cyclic_support,sectors_to_expand=my_sectors_to_expand,phi=my_phi) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container() - >>> result_expanded_meshes = op.outputs.expanded_meshes()""" - def __init__(self, time_scoping=None, mesh_scoping=None, fields_container=None, streams_container=None, data_sources=None, bool_rotate_to_global=None, sector_mesh=None, requested_location=None, read_cyclic=None, expanded_meshed_region=None, cyclic_support=None, sectors_to_expand=None, phi=None, config=None, server=None): - super().__init__(name="mapdl::rst::ENF_cyclic", config = config, server = server) + Parameters + ---------- + time_scoping : Scoping, optional + mesh_scoping : ScopingsContainer or Scoping, optional + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer or Stream, optional + Streams containing the result file. + data_sources : DataSources + Data sources containing the result file. + bool_rotate_to_global : bool, optional + Default is true + sector_mesh : MeshedRegion or MeshesContainer, optional + Mesh of the base sector (can be a skin). 
+ requested_location : str, optional + Location needed in output + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + expanded_meshed_region : MeshedRegion or MeshesContainer, optional + Mesh expanded. + cyclic_support : CyclicSupport, optional + sectors_to_expand : Scoping or ScopingsContainer, optional + Sectors to expand (start at 0), for + multistage: use scopings container + with 'stage' label. + phi : float, optional + Phi angle (default value 0.0) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.cyclic_expanded_enf() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_sector_mesh = dpf.MeshedRegion() + >>> op.inputs.sector_mesh.connect(my_sector_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_expanded_meshed_region = dpf.MeshedRegion() + >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) + >>> my_cyclic_support = dpf.CyclicSupport() + >>> op.inputs.cyclic_support.connect(my_cyclic_support) + >>> my_sectors_to_expand = dpf.Scoping() + >>> 
op.inputs.sectors_to_expand.connect(my_sectors_to_expand) + >>> my_phi = float() + >>> op.inputs.phi.connect(my_phi) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.cyclic_expanded_enf( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... sector_mesh=my_sector_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... expanded_meshed_region=my_expanded_meshed_region, + ... cyclic_support=my_cyclic_support, + ... sectors_to_expand=my_sectors_to_expand, + ... phi=my_phi, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + >>> result_expanded_meshes = op.outputs.expanded_meshes() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + sector_mesh=None, + requested_location=None, + read_cyclic=None, + expanded_meshed_region=None, + cyclic_support=None, + sectors_to_expand=None, + phi=None, + config=None, + server=None, + ): + super().__init__(name="mapdl::rst::ENF_cyclic", config=config, server=server) self._inputs = InputsCyclicExpandedEnf(self) self._outputs = OutputsCyclicExpandedEnf(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: 
self.inputs.data_sources.connect(data_sources) - if bool_rotate_to_global !=None: + if bool_rotate_to_global is not None: self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) - if sector_mesh !=None: + if sector_mesh is not None: self.inputs.sector_mesh.connect(sector_mesh) - if requested_location !=None: + if requested_location is not None: self.inputs.requested_location.connect(requested_location) - if read_cyclic !=None: + if read_cyclic is not None: self.inputs.read_cyclic.connect(read_cyclic) - if expanded_meshed_region !=None: + if expanded_meshed_region is not None: self.inputs.expanded_meshed_region.connect(expanded_meshed_region) - if cyclic_support !=None: + if cyclic_support is not None: self.inputs.cyclic_support.connect(cyclic_support) - if sectors_to_expand !=None: + if sectors_to_expand is not None: self.inputs.sectors_to_expand.connect(sectors_to_expand) - if phi !=None: + if phi is not None: self.inputs.phi.connect(phi) @staticmethod def _spec(): - spec = Specification(description="""Read ENF from an rst file and expand it with cyclic symmetry.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","vector"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping","vector"], optional=True, document=""""""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], optional=True, document="""Streams containing the result file."""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""data sources containing the result file."""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""default is true"""), - 7 : PinSpecification(name = 
"sector_mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""mesh of the base sector (can be a skin)."""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""location needed in output"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)"""), - 15 : PinSpecification(name = "expanded_meshed_region", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""mesh expanded."""), - 16 : PinSpecification(name = "cyclic_support", type_names=["cyclic_support"], optional=True, document=""""""), - 18 : PinSpecification(name = "sectors_to_expand", type_names=["vector","scoping","scopings_container"], optional=True, document="""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label."""), - 19 : PinSpecification(name = "phi", type_names=["double"], optional=True, document="""phi angle (default value 0.0)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""FieldsContainer filled in"""), - 1 : PinSpecification(name = "expanded_meshes", type_names=["meshes_container"], optional=False, document="""""")}) + description = ( + """Read ENF from an rst file and expand it with cyclic symmetry.""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping", "vector"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + 
document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container", "stream"], + optional=True, + document="""Streams containing the result file.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Data sources containing the result file.""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""Default is true""", + ), + 7: PinSpecification( + name="sector_mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Mesh of the base sector (can be a skin).""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Location needed in output""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 15: PinSpecification( + name="expanded_meshed_region", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Mesh expanded.""", + ), + 16: PinSpecification( + name="cyclic_support", + type_names=["cyclic_support"], + optional=True, + document="""""", + ), + 18: PinSpecification( + name="sectors_to_expand", + type_names=["vector", "scoping", "scopings_container"], + optional=True, + document="""Sectors to expand (start at 0), for + multistage: use scopings container + with 'stage' label.""", + ), + 19: PinSpecification( + name="phi", + type_names=["double"], + optional=True, + document="""Phi angle (default value 0.0)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + 
document="""Fieldscontainer filled in""", + ), + 1: PinSpecification( + name="expanded_meshes", + type_names=["meshes_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mapdl::rst::ENF_cyclic") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="mapdl::rst::ENF_cyclic", server=server) @property def inputs(self): @@ -137,423 +285,414 @@ def inputs(self): Returns -------- - inputs : InputsCyclicExpandedEnf + inputs : InputsCyclicExpandedEnf """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCyclicExpandedEnf + outputs : OutputsCyclicExpandedEnf """ return super().outputs -#internal name: mapdl::rst::ENF_cyclic -#scripting name: cyclic_expanded_enf class InputsCyclicExpandedEnf(_Inputs): - """Intermediate class used to connect user inputs to cyclic_expanded_enf operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.cyclic_expanded_enf() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> 
my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_sector_mesh = dpf.MeshedRegion() - >>> op.inputs.sector_mesh.connect(my_sector_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> my_expanded_meshed_region = dpf.MeshedRegion() - >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> my_cyclic_support = dpf.CyclicSupport() - >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> my_sectors_to_expand = dpf.list() - >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) - >>> my_phi = float() - >>> op.inputs.phi.connect(my_phi) + """Intermediate class used to connect user inputs to + cyclic_expanded_enf operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.cyclic_expanded_enf() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_sector_mesh = dpf.MeshedRegion() + >>> op.inputs.sector_mesh.connect(my_sector_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> 
my_expanded_meshed_region = dpf.MeshedRegion() + >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) + >>> my_cyclic_support = dpf.CyclicSupport() + >>> op.inputs.cyclic_support.connect(my_cyclic_support) + >>> my_sectors_to_expand = dpf.Scoping() + >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) + >>> my_phi = float() + >>> op.inputs.phi.connect(my_phi) """ + def __init__(self, op: Operator): super().__init__(cyclic_expanded_enf._spec().inputs, op) - self._time_scoping = Input(cyclic_expanded_enf._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(cyclic_expanded_enf._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(cyclic_expanded_enf._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(cyclic_expanded_enf._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(cyclic_expanded_enf._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + cyclic_expanded_enf._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(cyclic_expanded_enf._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + cyclic_expanded_enf._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(cyclic_expanded_enf._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(cyclic_expanded_enf._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(cyclic_expanded_enf._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + cyclic_expanded_enf._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._sector_mesh = Input(cyclic_expanded_enf._spec().input_pin(7), 7, op, -1) + self._sector_mesh = Input(cyclic_expanded_enf._spec().input_pin(7), 7, op, -1) self._inputs.append(self._sector_mesh) - 
self._requested_location = Input(cyclic_expanded_enf._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + cyclic_expanded_enf._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(cyclic_expanded_enf._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(cyclic_expanded_enf._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input(cyclic_expanded_enf._spec().input_pin(15), 15, op, -1) + self._expanded_meshed_region = Input( + cyclic_expanded_enf._spec().input_pin(15), 15, op, -1 + ) self._inputs.append(self._expanded_meshed_region) - self._cyclic_support = Input(cyclic_expanded_enf._spec().input_pin(16), 16, op, -1) + self._cyclic_support = Input( + cyclic_expanded_enf._spec().input_pin(16), 16, op, -1 + ) self._inputs.append(self._cyclic_support) - self._sectors_to_expand = Input(cyclic_expanded_enf._spec().input_pin(18), 18, op, -1) + self._sectors_to_expand = Input( + cyclic_expanded_enf._spec().input_pin(18), 18, op, -1 + ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(cyclic_expanded_enf._spec().input_pin(19), 19, op, -1) + self._phi = Input(cyclic_expanded_enf._spec().input_pin(19), 19, op, -1) self._inputs.append(self._phi) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. Parameters ---------- - my_time_scoping : Scoping, list, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_enf() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. 
Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, list, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_enf() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_enf() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: Streams containing the result file. + Streams containing the result file. Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_enf() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: data sources containing the result file. + Data sources containing the result file. 
Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_enf() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: default is true + Default is true Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_enf() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def sector_mesh(self): - """Allows to connect sector_mesh input to the operator + """Allows to connect sector_mesh input to the operator. - - pindoc: mesh of the base sector (can be a skin). + Mesh of the base sector (can be a skin). Parameters ---------- - my_sector_mesh : MeshedRegion, MeshesContainer, + my_sector_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_enf() >>> op.inputs.sector_mesh.connect(my_sector_mesh) - >>> #or + >>> # or >>> op.inputs.sector_mesh(my_sector_mesh) - """ return self._sector_mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. 
- - pindoc: location needed in output + Location needed in output Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_enf() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_enf() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic @property def expanded_meshed_region(self): - """Allows to connect expanded_meshed_region input to the operator + """Allows to connect expanded_meshed_region input to the operator. - - pindoc: mesh expanded. + Mesh expanded. 
Parameters ---------- - my_expanded_meshed_region : MeshedRegion, MeshesContainer, + my_expanded_meshed_region : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_enf() >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> #or + >>> # or >>> op.inputs.expanded_meshed_region(my_expanded_meshed_region) - """ return self._expanded_meshed_region @property def cyclic_support(self): - """Allows to connect cyclic_support input to the operator + """Allows to connect cyclic_support input to the operator. Parameters ---------- - my_cyclic_support : CyclicSupport, + my_cyclic_support : CyclicSupport Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_enf() >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> #or + >>> # or >>> op.inputs.cyclic_support(my_cyclic_support) - """ return self._cyclic_support @property def sectors_to_expand(self): - """Allows to connect sectors_to_expand input to the operator + """Allows to connect sectors_to_expand input to the operator. - - pindoc: sectors to expand (start at 0), for multistage: use scopings container with 'stage' label. + Sectors to expand (start at 0), for + multistage: use scopings container + with 'stage' label. Parameters ---------- - my_sectors_to_expand : list, Scoping, ScopingsContainer, + my_sectors_to_expand : Scoping or ScopingsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_enf() >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) - >>> #or + >>> # or >>> op.inputs.sectors_to_expand(my_sectors_to_expand) - """ return self._sectors_to_expand @property def phi(self): - """Allows to connect phi input to the operator + """Allows to connect phi input to the operator. 
- - pindoc: phi angle (default value 0.0) + Phi angle (default value 0.0) Parameters ---------- - my_phi : float, + my_phi : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_enf() >>> op.inputs.phi.connect(my_phi) - >>> #or + >>> # or >>> op.inputs.phi(my_phi) - """ return self._phi + class OutputsCyclicExpandedEnf(_Outputs): - """Intermediate class used to get outputs from cyclic_expanded_enf operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.cyclic_expanded_enf() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - >>> result_expanded_meshes = op.outputs.expanded_meshes() + """Intermediate class used to get outputs from + cyclic_expanded_enf operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.cyclic_expanded_enf() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + >>> result_expanded_meshes = op.outputs.expanded_meshes() """ + def __init__(self, op: Operator): super().__init__(cyclic_expanded_enf._spec().outputs, op) - self._fields_container = Output(cyclic_expanded_enf._spec().output_pin(0), 0, op) + self._fields_container = Output( + cyclic_expanded_enf._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) - self._expanded_meshes = Output(cyclic_expanded_enf._spec().output_pin(1), 1, op) + self._expanded_meshes = Output(cyclic_expanded_enf._spec().output_pin(1), 1, op) self._outputs.append(self._expanded_meshes) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: FieldsContainer filled in - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_enf() >>> # Connect 
inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container @property def expanded_meshes(self): """Allows to get expanded_meshes output of the operator - Returns ---------- - my_expanded_meshes : MeshesContainer, + my_expanded_meshes : MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_enf() >>> # Connect inputs : op.inputs. ... - >>> result_expanded_meshes = op.outputs.expanded_meshes() - """ + >>> result_expanded_meshes = op.outputs.expanded_meshes() + """ # noqa: E501 return self._expanded_meshes - diff --git a/ansys/dpf/core/operators/result/cyclic_expanded_stress.py b/ansys/dpf/core/operators/result/cyclic_expanded_stress.py index e9674e1b753..8e487d48d0c 100644 --- a/ansys/dpf/core/operators/result/cyclic_expanded_stress.py +++ b/ansys/dpf/core/operators/result/cyclic_expanded_stress.py @@ -1,135 +1,283 @@ """ cyclic_expanded_stress -====================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class cyclic_expanded_stress(Operator): - """Read mapdl::rst::S from an rst file and expand it with cyclic symmetry. 
- - available inputs: - - time_scoping (Scoping, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping, list) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - sector_mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - expanded_meshed_region (MeshedRegion, MeshesContainer) (optional) - - cyclic_support (CyclicSupport) (optional) - - sectors_to_expand (list, Scoping, ScopingsContainer) (optional) - - phi (float) (optional) - - available outputs: - - fields_container (FieldsContainer) - - expanded_meshes (MeshesContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.cyclic_expanded_stress() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_sector_mesh = dpf.MeshedRegion() - >>> op.inputs.sector_mesh.connect(my_sector_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> my_expanded_meshed_region = dpf.MeshedRegion() - >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> my_cyclic_support 
= dpf.CyclicSupport() - >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> my_sectors_to_expand = dpf.list() - >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) - >>> my_phi = float() - >>> op.inputs.phi.connect(my_phi) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.cyclic_expanded_stress(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,fields_container=my_fields_container,streams_container=my_streams_container,data_sources=my_data_sources,bool_rotate_to_global=my_bool_rotate_to_global,sector_mesh=my_sector_mesh,requested_location=my_requested_location,read_cyclic=my_read_cyclic,expanded_meshed_region=my_expanded_meshed_region,cyclic_support=my_cyclic_support,sectors_to_expand=my_sectors_to_expand,phi=my_phi) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container() - >>> result_expanded_meshes = op.outputs.expanded_meshes()""" - def __init__(self, time_scoping=None, mesh_scoping=None, fields_container=None, streams_container=None, data_sources=None, bool_rotate_to_global=None, sector_mesh=None, requested_location=None, read_cyclic=None, expanded_meshed_region=None, cyclic_support=None, sectors_to_expand=None, phi=None, config=None, server=None): - super().__init__(name="mapdl::rst::S_cyclic", config = config, server = server) + """Read mapdl::rst::S from an rst file and expand it with cyclic + symmetry. + + Parameters + ---------- + time_scoping : Scoping, optional + mesh_scoping : ScopingsContainer or Scoping, optional + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer or Stream, optional + Streams containing the result file. + data_sources : DataSources + Data sources containing the result file. + bool_rotate_to_global : bool, optional + Default is true + sector_mesh : MeshedRegion or MeshesContainer, optional + Mesh of the base sector (can be a skin). 
+ requested_location : str, optional + Location needed in output + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + expanded_meshed_region : MeshedRegion or MeshesContainer, optional + Mesh expanded. + cyclic_support : CyclicSupport, optional + sectors_to_expand : Scoping or ScopingsContainer, optional + Sectors to expand (start at 0), for + multistage: use scopings container + with 'stage' label. + phi : float, optional + Phi angle (default value 0.0) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.cyclic_expanded_stress() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_sector_mesh = dpf.MeshedRegion() + >>> op.inputs.sector_mesh.connect(my_sector_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_expanded_meshed_region = dpf.MeshedRegion() + >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) + >>> my_cyclic_support = dpf.CyclicSupport() + >>> op.inputs.cyclic_support.connect(my_cyclic_support) + >>> my_sectors_to_expand = dpf.Scoping() + >>> 
op.inputs.sectors_to_expand.connect(my_sectors_to_expand) + >>> my_phi = float() + >>> op.inputs.phi.connect(my_phi) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.cyclic_expanded_stress( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... sector_mesh=my_sector_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... expanded_meshed_region=my_expanded_meshed_region, + ... cyclic_support=my_cyclic_support, + ... sectors_to_expand=my_sectors_to_expand, + ... phi=my_phi, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + >>> result_expanded_meshes = op.outputs.expanded_meshes() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + sector_mesh=None, + requested_location=None, + read_cyclic=None, + expanded_meshed_region=None, + cyclic_support=None, + sectors_to_expand=None, + phi=None, + config=None, + server=None, + ): + super().__init__(name="mapdl::rst::S_cyclic", config=config, server=server) self._inputs = InputsCyclicExpandedStress(self) self._outputs = OutputsCyclicExpandedStress(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: 
self.inputs.data_sources.connect(data_sources) - if bool_rotate_to_global !=None: + if bool_rotate_to_global is not None: self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) - if sector_mesh !=None: + if sector_mesh is not None: self.inputs.sector_mesh.connect(sector_mesh) - if requested_location !=None: + if requested_location is not None: self.inputs.requested_location.connect(requested_location) - if read_cyclic !=None: + if read_cyclic is not None: self.inputs.read_cyclic.connect(read_cyclic) - if expanded_meshed_region !=None: + if expanded_meshed_region is not None: self.inputs.expanded_meshed_region.connect(expanded_meshed_region) - if cyclic_support !=None: + if cyclic_support is not None: self.inputs.cyclic_support.connect(cyclic_support) - if sectors_to_expand !=None: + if sectors_to_expand is not None: self.inputs.sectors_to_expand.connect(sectors_to_expand) - if phi !=None: + if phi is not None: self.inputs.phi.connect(phi) @staticmethod def _spec(): - spec = Specification(description="""Read mapdl::rst::S from an rst file and expand it with cyclic symmetry.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","vector"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping","vector"], optional=True, document=""""""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], optional=True, document="""Streams containing the result file."""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""data sources containing the result file."""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""default is true"""), - 7 : 
PinSpecification(name = "sector_mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""mesh of the base sector (can be a skin)."""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""location needed in output"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)"""), - 15 : PinSpecification(name = "expanded_meshed_region", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""mesh expanded."""), - 16 : PinSpecification(name = "cyclic_support", type_names=["cyclic_support"], optional=True, document=""""""), - 18 : PinSpecification(name = "sectors_to_expand", type_names=["vector","scoping","scopings_container"], optional=True, document="""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label."""), - 19 : PinSpecification(name = "phi", type_names=["double"], optional=True, document="""phi angle (default value 0.0)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""FieldsContainer filled in"""), - 1 : PinSpecification(name = "expanded_meshes", type_names=["meshes_container"], optional=False, document="""""")}) + description = """Read mapdl::rst::S from an rst file and expand it with cyclic + symmetry.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping", "vector"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + 
optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container", "stream"], + optional=True, + document="""Streams containing the result file.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Data sources containing the result file.""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""Default is true""", + ), + 7: PinSpecification( + name="sector_mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Mesh of the base sector (can be a skin).""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Location needed in output""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 15: PinSpecification( + name="expanded_meshed_region", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Mesh expanded.""", + ), + 16: PinSpecification( + name="cyclic_support", + type_names=["cyclic_support"], + optional=True, + document="""""", + ), + 18: PinSpecification( + name="sectors_to_expand", + type_names=["vector", "scoping", "scopings_container"], + optional=True, + document="""Sectors to expand (start at 0), for + multistage: use scopings container + with 'stage' label.""", + ), + 19: PinSpecification( + name="phi", + type_names=["double"], + optional=True, + document="""Phi angle (default value 0.0)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + 
optional=False, + document="""Fieldscontainer filled in""", + ), + 1: PinSpecification( + name="expanded_meshes", + type_names=["meshes_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mapdl::rst::S_cyclic") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="mapdl::rst::S_cyclic", server=server) @property def inputs(self): @@ -137,423 +285,426 @@ def inputs(self): Returns -------- - inputs : InputsCyclicExpandedStress + inputs : InputsCyclicExpandedStress """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCyclicExpandedStress + outputs : OutputsCyclicExpandedStress """ return super().outputs -#internal name: mapdl::rst::S_cyclic -#scripting name: cyclic_expanded_stress class InputsCyclicExpandedStress(_Inputs): - """Intermediate class used to connect user inputs to cyclic_expanded_stress operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.cyclic_expanded_stress() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> 
op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_sector_mesh = dpf.MeshedRegion() - >>> op.inputs.sector_mesh.connect(my_sector_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> my_expanded_meshed_region = dpf.MeshedRegion() - >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> my_cyclic_support = dpf.CyclicSupport() - >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> my_sectors_to_expand = dpf.list() - >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) - >>> my_phi = float() - >>> op.inputs.phi.connect(my_phi) + """Intermediate class used to connect user inputs to + cyclic_expanded_stress operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.cyclic_expanded_stress() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_sector_mesh = dpf.MeshedRegion() + >>> op.inputs.sector_mesh.connect(my_sector_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_expanded_meshed_region = dpf.MeshedRegion() + >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) + >>> my_cyclic_support = dpf.CyclicSupport() + >>> op.inputs.cyclic_support.connect(my_cyclic_support) + >>> my_sectors_to_expand = dpf.Scoping() + >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) + >>> my_phi = float() + >>> op.inputs.phi.connect(my_phi) """ + def __init__(self, op: Operator): super().__init__(cyclic_expanded_stress._spec().inputs, op) - self._time_scoping = Input(cyclic_expanded_stress._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + cyclic_expanded_stress._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(cyclic_expanded_stress._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + cyclic_expanded_stress._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = 
Input(cyclic_expanded_stress._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + cyclic_expanded_stress._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(cyclic_expanded_stress._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + cyclic_expanded_stress._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(cyclic_expanded_stress._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + cyclic_expanded_stress._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(cyclic_expanded_stress._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + cyclic_expanded_stress._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._sector_mesh = Input(cyclic_expanded_stress._spec().input_pin(7), 7, op, -1) + self._sector_mesh = Input( + cyclic_expanded_stress._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._sector_mesh) - self._requested_location = Input(cyclic_expanded_stress._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + cyclic_expanded_stress._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(cyclic_expanded_stress._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + cyclic_expanded_stress._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input(cyclic_expanded_stress._spec().input_pin(15), 15, op, -1) + self._expanded_meshed_region = Input( + cyclic_expanded_stress._spec().input_pin(15), 15, op, -1 + ) self._inputs.append(self._expanded_meshed_region) - self._cyclic_support = Input(cyclic_expanded_stress._spec().input_pin(16), 16, op, -1) + self._cyclic_support = Input( + cyclic_expanded_stress._spec().input_pin(16), 16, op, -1 + ) 
self._inputs.append(self._cyclic_support) - self._sectors_to_expand = Input(cyclic_expanded_stress._spec().input_pin(18), 18, op, -1) + self._sectors_to_expand = Input( + cyclic_expanded_stress._spec().input_pin(18), 18, op, -1 + ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(cyclic_expanded_stress._spec().input_pin(19), 19, op, -1) + self._phi = Input(cyclic_expanded_stress._spec().input_pin(19), 19, op, -1) self._inputs.append(self._phi) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. Parameters ---------- - my_time_scoping : Scoping, list, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_stress() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, list, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_stress() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_stress() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: Streams containing the result file. + Streams containing the result file. Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_stress() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: data sources containing the result file. + Data sources containing the result file. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_stress() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: default is true + Default is true Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_stress() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def sector_mesh(self): - """Allows to connect sector_mesh input to the operator + """Allows to connect sector_mesh input to the operator. - - pindoc: mesh of the base sector (can be a skin). + Mesh of the base sector (can be a skin). Parameters ---------- - my_sector_mesh : MeshedRegion, MeshesContainer, + my_sector_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_stress() >>> op.inputs.sector_mesh.connect(my_sector_mesh) - >>> #or + >>> # or >>> op.inputs.sector_mesh(my_sector_mesh) - """ return self._sector_mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: location needed in output + Location needed in output Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_stress() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_stress() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic @property def expanded_meshed_region(self): - """Allows to connect expanded_meshed_region input to the operator + """Allows to connect expanded_meshed_region input to the operator. - - pindoc: mesh expanded. + Mesh expanded. Parameters ---------- - my_expanded_meshed_region : MeshedRegion, MeshesContainer, + my_expanded_meshed_region : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_stress() >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> #or + >>> # or >>> op.inputs.expanded_meshed_region(my_expanded_meshed_region) - """ return self._expanded_meshed_region @property def cyclic_support(self): - """Allows to connect cyclic_support input to the operator + """Allows to connect cyclic_support input to the operator. 
Parameters ---------- - my_cyclic_support : CyclicSupport, + my_cyclic_support : CyclicSupport Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_stress() >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> #or + >>> # or >>> op.inputs.cyclic_support(my_cyclic_support) - """ return self._cyclic_support @property def sectors_to_expand(self): - """Allows to connect sectors_to_expand input to the operator + """Allows to connect sectors_to_expand input to the operator. - - pindoc: sectors to expand (start at 0), for multistage: use scopings container with 'stage' label. + Sectors to expand (start at 0), for + multistage: use scopings container + with 'stage' label. Parameters ---------- - my_sectors_to_expand : list, Scoping, ScopingsContainer, + my_sectors_to_expand : Scoping or ScopingsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_stress() >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) - >>> #or + >>> # or >>> op.inputs.sectors_to_expand(my_sectors_to_expand) - """ return self._sectors_to_expand @property def phi(self): - """Allows to connect phi input to the operator + """Allows to connect phi input to the operator. - - pindoc: phi angle (default value 0.0) + Phi angle (default value 0.0) Parameters ---------- - my_phi : float, + my_phi : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_stress() >>> op.inputs.phi.connect(my_phi) - >>> #or + >>> # or >>> op.inputs.phi(my_phi) - """ return self._phi + class OutputsCyclicExpandedStress(_Outputs): - """Intermediate class used to get outputs from cyclic_expanded_stress operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.cyclic_expanded_stress() - >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - >>> result_expanded_meshes = op.outputs.expanded_meshes() + """Intermediate class used to get outputs from + cyclic_expanded_stress operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.cyclic_expanded_stress() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + >>> result_expanded_meshes = op.outputs.expanded_meshes() """ + def __init__(self, op: Operator): super().__init__(cyclic_expanded_stress._spec().outputs, op) - self._fields_container = Output(cyclic_expanded_stress._spec().output_pin(0), 0, op) + self._fields_container = Output( + cyclic_expanded_stress._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) - self._expanded_meshes = Output(cyclic_expanded_stress._spec().output_pin(1), 1, op) + self._expanded_meshes = Output( + cyclic_expanded_stress._spec().output_pin(1), 1, op + ) self._outputs.append(self._expanded_meshes) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: FieldsContainer filled in - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_stress() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container @property def expanded_meshes(self): """Allows to get expanded_meshes output of the operator - Returns ---------- - my_expanded_meshes : MeshesContainer, + my_expanded_meshes : MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_stress() >>> # Connect inputs : op.inputs. ... 
- >>> result_expanded_meshes = op.outputs.expanded_meshes() - """ + >>> result_expanded_meshes = op.outputs.expanded_meshes() + """ # noqa: E501 return self._expanded_meshes - diff --git a/ansys/dpf/core/operators/result/cyclic_expanded_velocity.py b/ansys/dpf/core/operators/result/cyclic_expanded_velocity.py index fa4f23a2a7c..26ab076750f 100644 --- a/ansys/dpf/core/operators/result/cyclic_expanded_velocity.py +++ b/ansys/dpf/core/operators/result/cyclic_expanded_velocity.py @@ -1,135 +1,283 @@ """ cyclic_expanded_velocity -======================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class cyclic_expanded_velocity(Operator): """Read velocity from an rst file and expand it with cyclic symmetry. 
- available inputs: - - time_scoping (Scoping, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping, list) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - sector_mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - expanded_meshed_region (MeshedRegion, MeshesContainer) (optional) - - cyclic_support (CyclicSupport) (optional) - - sectors_to_expand (list, Scoping, ScopingsContainer) (optional) - - phi (float) (optional) - - available outputs: - - fields_container (FieldsContainer) - - expanded_meshes (MeshesContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.cyclic_expanded_velocity() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_sector_mesh = dpf.MeshedRegion() - >>> op.inputs.sector_mesh.connect(my_sector_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> my_expanded_meshed_region = dpf.MeshedRegion() - >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> my_cyclic_support 
= dpf.CyclicSupport() - >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> my_sectors_to_expand = dpf.list() - >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) - >>> my_phi = float() - >>> op.inputs.phi.connect(my_phi) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.cyclic_expanded_velocity(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,fields_container=my_fields_container,streams_container=my_streams_container,data_sources=my_data_sources,bool_rotate_to_global=my_bool_rotate_to_global,sector_mesh=my_sector_mesh,requested_location=my_requested_location,read_cyclic=my_read_cyclic,expanded_meshed_region=my_expanded_meshed_region,cyclic_support=my_cyclic_support,sectors_to_expand=my_sectors_to_expand,phi=my_phi) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container() - >>> result_expanded_meshes = op.outputs.expanded_meshes()""" - def __init__(self, time_scoping=None, mesh_scoping=None, fields_container=None, streams_container=None, data_sources=None, bool_rotate_to_global=None, sector_mesh=None, requested_location=None, read_cyclic=None, expanded_meshed_region=None, cyclic_support=None, sectors_to_expand=None, phi=None, config=None, server=None): - super().__init__(name="mapdl::rst::V_cyclic", config = config, server = server) + Parameters + ---------- + time_scoping : Scoping, optional + mesh_scoping : ScopingsContainer or Scoping, optional + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer or Stream, optional + Streams containing the result file. + data_sources : DataSources + Data sources containing the result file. + bool_rotate_to_global : bool, optional + Default is true + sector_mesh : MeshedRegion or MeshesContainer, optional + Mesh of the base sector (can be a skin). 
+ requested_location : str, optional + Location needed in output + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + expanded_meshed_region : MeshedRegion or MeshesContainer, optional + Mesh expanded. + cyclic_support : CyclicSupport, optional + sectors_to_expand : Scoping or ScopingsContainer, optional + Sectors to expand (start at 0), for + multistage: use scopings container + with 'stage' label. + phi : float, optional + Angle phi (default value 0.0) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.cyclic_expanded_velocity() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_sector_mesh = dpf.MeshedRegion() + >>> op.inputs.sector_mesh.connect(my_sector_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_expanded_meshed_region = dpf.MeshedRegion() + >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) + >>> my_cyclic_support = dpf.CyclicSupport() + >>> op.inputs.cyclic_support.connect(my_cyclic_support) + >>> my_sectors_to_expand = dpf.Scoping() + >>> 
op.inputs.sectors_to_expand.connect(my_sectors_to_expand) + >>> my_phi = float() + >>> op.inputs.phi.connect(my_phi) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.cyclic_expanded_velocity( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... sector_mesh=my_sector_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... expanded_meshed_region=my_expanded_meshed_region, + ... cyclic_support=my_cyclic_support, + ... sectors_to_expand=my_sectors_to_expand, + ... phi=my_phi, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + >>> result_expanded_meshes = op.outputs.expanded_meshes() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + sector_mesh=None, + requested_location=None, + read_cyclic=None, + expanded_meshed_region=None, + cyclic_support=None, + sectors_to_expand=None, + phi=None, + config=None, + server=None, + ): + super().__init__(name="mapdl::rst::V_cyclic", config=config, server=server) self._inputs = InputsCyclicExpandedVelocity(self) self._outputs = OutputsCyclicExpandedVelocity(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: 
self.inputs.data_sources.connect(data_sources) - if bool_rotate_to_global !=None: + if bool_rotate_to_global is not None: self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) - if sector_mesh !=None: + if sector_mesh is not None: self.inputs.sector_mesh.connect(sector_mesh) - if requested_location !=None: + if requested_location is not None: self.inputs.requested_location.connect(requested_location) - if read_cyclic !=None: + if read_cyclic is not None: self.inputs.read_cyclic.connect(read_cyclic) - if expanded_meshed_region !=None: + if expanded_meshed_region is not None: self.inputs.expanded_meshed_region.connect(expanded_meshed_region) - if cyclic_support !=None: + if cyclic_support is not None: self.inputs.cyclic_support.connect(cyclic_support) - if sectors_to_expand !=None: + if sectors_to_expand is not None: self.inputs.sectors_to_expand.connect(sectors_to_expand) - if phi !=None: + if phi is not None: self.inputs.phi.connect(phi) @staticmethod def _spec(): - spec = Specification(description="""Read velocity from an rst file and expand it with cyclic symmetry.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","vector"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping","vector"], optional=True, document=""""""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], optional=True, document="""Streams containing the result file."""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""data sources containing the result file."""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""default is true"""), - 7 : PinSpecification(name = 
"sector_mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""mesh of the base sector (can be a skin)."""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""location needed in output"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)"""), - 15 : PinSpecification(name = "expanded_meshed_region", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""mesh expanded."""), - 16 : PinSpecification(name = "cyclic_support", type_names=["cyclic_support"], optional=True, document=""""""), - 18 : PinSpecification(name = "sectors_to_expand", type_names=["vector","scoping","scopings_container"], optional=True, document="""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label."""), - 19 : PinSpecification(name = "phi", type_names=["double"], optional=True, document="""angle phi (default value 0.0)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""FieldsContainer filled in"""), - 1 : PinSpecification(name = "expanded_meshes", type_names=["meshes_container"], optional=False, document="""""")}) + description = ( + """Read velocity from an rst file and expand it with cyclic symmetry.""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping", "vector"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + 
document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container", "stream"], + optional=True, + document="""Streams containing the result file.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Data sources containing the result file.""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""Default is true""", + ), + 7: PinSpecification( + name="sector_mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Mesh of the base sector (can be a skin).""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Location needed in output""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 15: PinSpecification( + name="expanded_meshed_region", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Mesh expanded.""", + ), + 16: PinSpecification( + name="cyclic_support", + type_names=["cyclic_support"], + optional=True, + document="""""", + ), + 18: PinSpecification( + name="sectors_to_expand", + type_names=["vector", "scoping", "scopings_container"], + optional=True, + document="""Sectors to expand (start at 0), for + multistage: use scopings container + with 'stage' label.""", + ), + 19: PinSpecification( + name="phi", + type_names=["double"], + optional=True, + document="""Angle phi (default value 0.0)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + 
document="""Fieldscontainer filled in""", + ), + 1: PinSpecification( + name="expanded_meshes", + type_names=["meshes_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mapdl::rst::V_cyclic") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="mapdl::rst::V_cyclic", server=server) @property def inputs(self): @@ -137,423 +285,426 @@ def inputs(self): Returns -------- - inputs : InputsCyclicExpandedVelocity + inputs : InputsCyclicExpandedVelocity """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCyclicExpandedVelocity + outputs : OutputsCyclicExpandedVelocity """ return super().outputs -#internal name: mapdl::rst::V_cyclic -#scripting name: cyclic_expanded_velocity class InputsCyclicExpandedVelocity(_Inputs): - """Intermediate class used to connect user inputs to cyclic_expanded_velocity operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.cyclic_expanded_velocity() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> 
op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_sector_mesh = dpf.MeshedRegion() - >>> op.inputs.sector_mesh.connect(my_sector_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> my_expanded_meshed_region = dpf.MeshedRegion() - >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> my_cyclic_support = dpf.CyclicSupport() - >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> my_sectors_to_expand = dpf.list() - >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) - >>> my_phi = float() - >>> op.inputs.phi.connect(my_phi) + """Intermediate class used to connect user inputs to + cyclic_expanded_velocity operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.cyclic_expanded_velocity() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_sector_mesh = dpf.MeshedRegion() + >>> op.inputs.sector_mesh.connect(my_sector_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_expanded_meshed_region = dpf.MeshedRegion() + >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) + >>> my_cyclic_support = dpf.CyclicSupport() + >>> op.inputs.cyclic_support.connect(my_cyclic_support) + >>> my_sectors_to_expand = dpf.Scoping() + >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) + >>> my_phi = float() + >>> op.inputs.phi.connect(my_phi) """ + def __init__(self, op: Operator): super().__init__(cyclic_expanded_velocity._spec().inputs, op) - self._time_scoping = Input(cyclic_expanded_velocity._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + cyclic_expanded_velocity._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(cyclic_expanded_velocity._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + cyclic_expanded_velocity._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = 
Input(cyclic_expanded_velocity._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + cyclic_expanded_velocity._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(cyclic_expanded_velocity._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + cyclic_expanded_velocity._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(cyclic_expanded_velocity._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + cyclic_expanded_velocity._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(cyclic_expanded_velocity._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + cyclic_expanded_velocity._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._sector_mesh = Input(cyclic_expanded_velocity._spec().input_pin(7), 7, op, -1) + self._sector_mesh = Input( + cyclic_expanded_velocity._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._sector_mesh) - self._requested_location = Input(cyclic_expanded_velocity._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + cyclic_expanded_velocity._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(cyclic_expanded_velocity._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + cyclic_expanded_velocity._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input(cyclic_expanded_velocity._spec().input_pin(15), 15, op, -1) + self._expanded_meshed_region = Input( + cyclic_expanded_velocity._spec().input_pin(15), 15, op, -1 + ) self._inputs.append(self._expanded_meshed_region) - self._cyclic_support = Input(cyclic_expanded_velocity._spec().input_pin(16), 16, op, -1) + self._cyclic_support = Input( + 
cyclic_expanded_velocity._spec().input_pin(16), 16, op, -1 + ) self._inputs.append(self._cyclic_support) - self._sectors_to_expand = Input(cyclic_expanded_velocity._spec().input_pin(18), 18, op, -1) + self._sectors_to_expand = Input( + cyclic_expanded_velocity._spec().input_pin(18), 18, op, -1 + ) self._inputs.append(self._sectors_to_expand) - self._phi = Input(cyclic_expanded_velocity._spec().input_pin(19), 19, op, -1) + self._phi = Input(cyclic_expanded_velocity._spec().input_pin(19), 19, op, -1) self._inputs.append(self._phi) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. Parameters ---------- - my_time_scoping : Scoping, list, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_velocity() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, list, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_velocity() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_velocity() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: Streams containing the result file. + Streams containing the result file. Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_velocity() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: data sources containing the result file. + Data sources containing the result file. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_velocity() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: default is true + Default is true Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_velocity() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def sector_mesh(self): - """Allows to connect sector_mesh input to the operator + """Allows to connect sector_mesh input to the operator. - - pindoc: mesh of the base sector (can be a skin). + Mesh of the base sector (can be a skin). Parameters ---------- - my_sector_mesh : MeshedRegion, MeshesContainer, + my_sector_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_velocity() >>> op.inputs.sector_mesh.connect(my_sector_mesh) - >>> #or + >>> # or >>> op.inputs.sector_mesh(my_sector_mesh) - """ return self._sector_mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: location needed in output + Location needed in output Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_velocity() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_velocity() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic @property def expanded_meshed_region(self): - """Allows to connect expanded_meshed_region input to the operator + """Allows to connect expanded_meshed_region input to the operator. - - pindoc: mesh expanded. + Mesh expanded. Parameters ---------- - my_expanded_meshed_region : MeshedRegion, MeshesContainer, + my_expanded_meshed_region : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_velocity() >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> #or + >>> # or >>> op.inputs.expanded_meshed_region(my_expanded_meshed_region) - """ return self._expanded_meshed_region @property def cyclic_support(self): - """Allows to connect cyclic_support input to the operator + """Allows to connect cyclic_support input to the operator. 
Parameters ---------- - my_cyclic_support : CyclicSupport, + my_cyclic_support : CyclicSupport Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_velocity() >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> #or + >>> # or >>> op.inputs.cyclic_support(my_cyclic_support) - """ return self._cyclic_support @property def sectors_to_expand(self): - """Allows to connect sectors_to_expand input to the operator + """Allows to connect sectors_to_expand input to the operator. - - pindoc: sectors to expand (start at 0), for multistage: use scopings container with 'stage' label. + Sectors to expand (start at 0), for + multistage: use scopings container + with 'stage' label. Parameters ---------- - my_sectors_to_expand : list, Scoping, ScopingsContainer, + my_sectors_to_expand : Scoping or ScopingsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_velocity() >>> op.inputs.sectors_to_expand.connect(my_sectors_to_expand) - >>> #or + >>> # or >>> op.inputs.sectors_to_expand(my_sectors_to_expand) - """ return self._sectors_to_expand @property def phi(self): - """Allows to connect phi input to the operator + """Allows to connect phi input to the operator. - - pindoc: angle phi (default value 0.0) + Angle phi (default value 0.0) Parameters ---------- - my_phi : float, + my_phi : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_velocity() >>> op.inputs.phi.connect(my_phi) - >>> #or + >>> # or >>> op.inputs.phi(my_phi) - """ return self._phi + class OutputsCyclicExpandedVelocity(_Outputs): - """Intermediate class used to get outputs from cyclic_expanded_velocity operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.cyclic_expanded_velocity() - >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - >>> result_expanded_meshes = op.outputs.expanded_meshes() + """Intermediate class used to get outputs from + cyclic_expanded_velocity operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.cyclic_expanded_velocity() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + >>> result_expanded_meshes = op.outputs.expanded_meshes() """ + def __init__(self, op: Operator): super().__init__(cyclic_expanded_velocity._spec().outputs, op) - self._fields_container = Output(cyclic_expanded_velocity._spec().output_pin(0), 0, op) + self._fields_container = Output( + cyclic_expanded_velocity._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) - self._expanded_meshes = Output(cyclic_expanded_velocity._spec().output_pin(1), 1, op) + self._expanded_meshes = Output( + cyclic_expanded_velocity._spec().output_pin(1), 1, op + ) self._outputs.append(self._expanded_meshes) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: FieldsContainer filled in - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_velocity() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container @property def expanded_meshes(self): """Allows to get expanded_meshes output of the operator - Returns ---------- - my_expanded_meshes : MeshesContainer, + my_expanded_meshes : MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expanded_velocity() >>> # Connect inputs : op.inputs. ... 
- >>> result_expanded_meshes = op.outputs.expanded_meshes() - """ + >>> result_expanded_meshes = op.outputs.expanded_meshes() + """ # noqa: E501 return self._expanded_meshes - diff --git a/ansys/dpf/core/operators/result/cyclic_expansion.py b/ansys/dpf/core/operators/result/cyclic_expansion.py index d16cb82adbd..cb5be8bab8b 100644 --- a/ansys/dpf/core/operators/result/cyclic_expansion.py +++ b/ansys/dpf/core/operators/result/cyclic_expansion.py @@ -1,84 +1,152 @@ """ cyclic_expansion -================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "result" category -""" class cyclic_expansion(Operator): - """Expand cyclic results from a fieldsContainer for given sets, sectors and scoping (optionals). 
- - available inputs: - - time_scoping (Scoping, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping, list) (optional) - - fields_container (FieldsContainer) - - bool_rotate_to_global (bool) (optional) - - cyclic_support (CyclicSupport) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.cyclic_expansion() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_cyclic_support = dpf.CyclicSupport() - >>> op.inputs.cyclic_support.connect(my_cyclic_support) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.cyclic_expansion(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,fields_container=my_fields_container,bool_rotate_to_global=my_bool_rotate_to_global,cyclic_support=my_cyclic_support) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, fields_container=None, bool_rotate_to_global=None, cyclic_support=None, config=None, server=None): - super().__init__(name="cyclic_expansion", config = config, server = server) + """Expand cyclic results from a fieldsContainer for given sets, sectors + and scoping (optionals). 
+ + Parameters + ---------- + time_scoping : Scoping, optional + mesh_scoping : ScopingsContainer or Scoping, optional + fields_container : FieldsContainer + Field container with the base and duplicate + sectors + bool_rotate_to_global : bool, optional + Default is true + cyclic_support : CyclicSupport + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.cyclic_expansion() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_cyclic_support = dpf.CyclicSupport() + >>> op.inputs.cyclic_support.connect(my_cyclic_support) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.cyclic_expansion( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... cyclic_support=my_cyclic_support, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + bool_rotate_to_global=None, + cyclic_support=None, + config=None, + server=None, + ): + super().__init__(name="cyclic_expansion", config=config, server=server) self._inputs = InputsCyclicExpansion(self) self._outputs = OutputsCyclicExpansion(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if bool_rotate_to_global !=None: + if bool_rotate_to_global is not None: self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) - if cyclic_support !=None: + if cyclic_support is not None: self.inputs.cyclic_support.connect(cyclic_support) @staticmethod def _spec(): - spec = Specification(description="""Expand cyclic results from a fieldsContainer for given sets, sectors and scoping (optionals).""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","vector"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping","vector"], optional=True, document=""""""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""field container with the base and duplicate sectors"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""default is true"""), - 16 : PinSpecification(name = "cyclic_support", type_names=["cyclic_support"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, 
document="""FieldsContainer filled in""")}) + description = """Expand cyclic results from a fieldsContainer for given sets, sectors + and scoping (optionals).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping", "vector"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Field container with the base and duplicate + sectors""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""Default is true""", + ), + 16: PinSpecification( + name="cyclic_support", + type_names=["cyclic_support"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Fieldscontainer filled in""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "cyclic_expansion") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="cyclic_expansion", server=server) @property def inputs(self): @@ -86,193 +154,182 @@ def inputs(self): Returns -------- - inputs : InputsCyclicExpansion + inputs : InputsCyclicExpansion """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCyclicExpansion + outputs : OutputsCyclicExpansion """ return super().outputs -#internal name: cyclic_expansion -#scripting name: cyclic_expansion class InputsCyclicExpansion(_Inputs): - """Intermediate class used to connect user inputs to cyclic_expansion operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.cyclic_expansion() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_cyclic_support = dpf.CyclicSupport() - >>> op.inputs.cyclic_support.connect(my_cyclic_support) + """Intermediate class used to connect user inputs to + cyclic_expansion operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.cyclic_expansion() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_cyclic_support = dpf.CyclicSupport() + >>> op.inputs.cyclic_support.connect(my_cyclic_support) """ + def __init__(self, op: Operator): super().__init__(cyclic_expansion._spec().inputs, op) - self._time_scoping = Input(cyclic_expansion._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(cyclic_expansion._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(cyclic_expansion._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(cyclic_expansion._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(cyclic_expansion._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(cyclic_expansion._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._bool_rotate_to_global = Input(cyclic_expansion._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + cyclic_expansion._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._cyclic_support = Input(cyclic_expansion._spec().input_pin(16), 16, op, -1) + self._cyclic_support = Input(cyclic_expansion._spec().input_pin(16), 16, op, -1) self._inputs.append(self._cyclic_support) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
Parameters ---------- - my_time_scoping : Scoping, list, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expansion() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, list, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expansion() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: field container with the base and duplicate sectors + Field container with the base and duplicate + sectors Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expansion() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: default is true + Default is true Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expansion() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def cyclic_support(self): - """Allows to connect cyclic_support input to the operator + """Allows to connect cyclic_support input to the operator. Parameters ---------- - my_cyclic_support : CyclicSupport, + my_cyclic_support : CyclicSupport Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expansion() >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> #or + >>> # or >>> op.inputs.cyclic_support(my_cyclic_support) - """ return self._cyclic_support + class OutputsCyclicExpansion(_Outputs): - """Intermediate class used to get outputs from cyclic_expansion operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.cyclic_expansion() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + cyclic_expansion operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.cyclic_expansion() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(cyclic_expansion._spec().outputs, op) - self._fields_container = Output(cyclic_expansion._spec().output_pin(0), 0, op) + self._fields_container = Output(cyclic_expansion._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: FieldsContainer filled in - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_expansion() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/cyclic_strain_energy.py b/ansys/dpf/core/operators/result/cyclic_strain_energy.py index 83a599d1ad2..57f2b966dd3 100644 --- a/ansys/dpf/core/operators/result/cyclic_strain_energy.py +++ b/ansys/dpf/core/operators/result/cyclic_strain_energy.py @@ -1,117 +1,237 @@ """ cyclic_strain_energy -==================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class cyclic_strain_energy(Operator): """Computes mapdl::rst::ENG_SE from an rst file. 
- available inputs: - - time_scoping (Scoping, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping, list) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - sector_mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - expanded_meshed_region (MeshedRegion, MeshesContainer) (optional) - - cyclic_support (CyclicSupport) (optional) - - available outputs: - - fields_container (FieldsContainer) - - expanded_meshes (MeshesContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.cyclic_strain_energy() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_sector_mesh = dpf.MeshedRegion() - >>> op.inputs.sector_mesh.connect(my_sector_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> my_expanded_meshed_region = dpf.MeshedRegion() - >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> my_cyclic_support = dpf.CyclicSupport() - >>> op.inputs.cyclic_support.connect(my_cyclic_support) - - >>> # Instantiate operator and connect inputs in one line - >>> op = 
dpf.operators.result.cyclic_strain_energy(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,fields_container=my_fields_container,streams_container=my_streams_container,data_sources=my_data_sources,bool_rotate_to_global=my_bool_rotate_to_global,sector_mesh=my_sector_mesh,read_cyclic=my_read_cyclic,expanded_meshed_region=my_expanded_meshed_region,cyclic_support=my_cyclic_support) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container() - >>> result_expanded_meshes = op.outputs.expanded_meshes()""" - def __init__(self, time_scoping=None, mesh_scoping=None, fields_container=None, streams_container=None, data_sources=None, bool_rotate_to_global=None, sector_mesh=None, read_cyclic=None, expanded_meshed_region=None, cyclic_support=None, config=None, server=None): - super().__init__(name="mapdl::rst::ENG_SE_cyclic", config = config, server = server) + Parameters + ---------- + time_scoping : Scoping, optional + mesh_scoping : ScopingsContainer or Scoping, optional + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer or Stream, optional + Streams containing the result file. + data_sources : DataSources + Data sources containing the result file. + bool_rotate_to_global : bool, optional + If true the field is roated to global + coordinate system (default true) + sector_mesh : MeshedRegion or MeshesContainer, optional + Mesh of the base sector (can be a skin). + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + expanded_meshed_region : MeshedRegion or MeshesContainer, optional + Mesh expanded. 
+ cyclic_support : CyclicSupport, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.cyclic_strain_energy() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_sector_mesh = dpf.MeshedRegion() + >>> op.inputs.sector_mesh.connect(my_sector_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_expanded_meshed_region = dpf.MeshedRegion() + >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) + >>> my_cyclic_support = dpf.CyclicSupport() + >>> op.inputs.cyclic_support.connect(my_cyclic_support) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.cyclic_strain_energy( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... sector_mesh=my_sector_mesh, + ... read_cyclic=my_read_cyclic, + ... expanded_meshed_region=my_expanded_meshed_region, + ... cyclic_support=my_cyclic_support, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + >>> result_expanded_meshes = op.outputs.expanded_meshes() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + sector_mesh=None, + read_cyclic=None, + expanded_meshed_region=None, + cyclic_support=None, + config=None, + server=None, + ): + super().__init__(name="mapdl::rst::ENG_SE_cyclic", config=config, server=server) self._inputs = InputsCyclicStrainEnergy(self) self._outputs = OutputsCyclicStrainEnergy(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if bool_rotate_to_global !=None: + if bool_rotate_to_global is not None: self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) - if sector_mesh !=None: + if sector_mesh is not None: self.inputs.sector_mesh.connect(sector_mesh) - if read_cyclic !=None: + if read_cyclic is not None: self.inputs.read_cyclic.connect(read_cyclic) - if expanded_meshed_region !=None: + if expanded_meshed_region is not None: self.inputs.expanded_meshed_region.connect(expanded_meshed_region) - if cyclic_support !=None: + if cyclic_support is not None: self.inputs.cyclic_support.connect(cyclic_support) @staticmethod def _spec(): - spec = Specification(description="""Computes mapdl::rst::ENG_SE from an rst file.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", 
type_names=["scoping","vector"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping","vector"], optional=True, document=""""""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], optional=True, document="""Streams containing the result file."""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""data sources containing the result file."""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is roated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "sector_mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""mesh of the base sector (can be a skin)."""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)"""), - 15 : PinSpecification(name = "expanded_meshed_region", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""mesh expanded."""), - 16 : PinSpecification(name = "cyclic_support", type_names=["cyclic_support"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""FieldsContainer filled in"""), - 1 : PinSpecification(name = "expanded_meshes", type_names=["meshes_container"], optional=False, document="""""")}) + description = """Computes mapdl::rst::ENG_SE from an rst file.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: 
PinSpecification( + name="time_scoping", + type_names=["scoping", "vector"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping", "vector"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container", "stream"], + optional=True, + document="""Streams containing the result file.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Data sources containing the result file.""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is roated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="sector_mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Mesh of the base sector (can be a skin).""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 15: PinSpecification( + name="expanded_meshed_region", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Mesh expanded.""", + ), + 16: PinSpecification( + name="cyclic_support", + type_names=["cyclic_support"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Fieldscontainer filled in""", + ), + 1: PinSpecification( + name="expanded_meshes", + 
type_names=["meshes_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mapdl::rst::ENG_SE_cyclic") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="mapdl::rst::ENG_SE_cyclic", server=server) @property def inputs(self): @@ -119,345 +239,341 @@ def inputs(self): Returns -------- - inputs : InputsCyclicStrainEnergy + inputs : InputsCyclicStrainEnergy """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCyclicStrainEnergy + outputs : OutputsCyclicStrainEnergy """ return super().outputs -#internal name: mapdl::rst::ENG_SE_cyclic -#scripting name: cyclic_strain_energy class InputsCyclicStrainEnergy(_Inputs): - """Intermediate class used to connect user inputs to cyclic_strain_energy operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.cyclic_strain_energy() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - 
>>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_sector_mesh = dpf.MeshedRegion() - >>> op.inputs.sector_mesh.connect(my_sector_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> my_expanded_meshed_region = dpf.MeshedRegion() - >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> my_cyclic_support = dpf.CyclicSupport() - >>> op.inputs.cyclic_support.connect(my_cyclic_support) + """Intermediate class used to connect user inputs to + cyclic_strain_energy operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.cyclic_strain_energy() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_sector_mesh = dpf.MeshedRegion() + >>> op.inputs.sector_mesh.connect(my_sector_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_expanded_meshed_region = dpf.MeshedRegion() + >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) + >>> my_cyclic_support = dpf.CyclicSupport() + >>> op.inputs.cyclic_support.connect(my_cyclic_support) """ + def __init__(self, op: Operator): super().__init__(cyclic_strain_energy._spec().inputs, op) - self._time_scoping = Input(cyclic_strain_energy._spec().input_pin(0), 0, op, -1) + self._time_scoping = 
Input(cyclic_strain_energy._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(cyclic_strain_energy._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(cyclic_strain_energy._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(cyclic_strain_energy._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + cyclic_strain_energy._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(cyclic_strain_energy._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + cyclic_strain_energy._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(cyclic_strain_energy._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(cyclic_strain_energy._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(cyclic_strain_energy._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + cyclic_strain_energy._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._sector_mesh = Input(cyclic_strain_energy._spec().input_pin(7), 7, op, -1) + self._sector_mesh = Input(cyclic_strain_energy._spec().input_pin(7), 7, op, -1) self._inputs.append(self._sector_mesh) - self._read_cyclic = Input(cyclic_strain_energy._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + cyclic_strain_energy._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input(cyclic_strain_energy._spec().input_pin(15), 15, op, -1) + self._expanded_meshed_region = Input( + cyclic_strain_energy._spec().input_pin(15), 15, op, -1 + ) self._inputs.append(self._expanded_meshed_region) - self._cyclic_support = Input(cyclic_strain_energy._spec().input_pin(16), 16, op, -1) + self._cyclic_support = Input( + 
cyclic_strain_energy._spec().input_pin(16), 16, op, -1 + ) self._inputs.append(self._cyclic_support) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. Parameters ---------- - my_time_scoping : Scoping, list, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_strain_energy() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, list, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_strain_energy() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_strain_energy() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: Streams containing the result file. + Streams containing the result file. Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_strain_energy() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: data sources containing the result file. + Data sources containing the result file. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_strain_energy() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is roated to global coordinate system (default true) + If true the field is roated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_strain_energy() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def sector_mesh(self): - """Allows to connect sector_mesh input to the operator + """Allows to connect sector_mesh input to the operator. 
- - pindoc: mesh of the base sector (can be a skin). + Mesh of the base sector (can be a skin). Parameters ---------- - my_sector_mesh : MeshedRegion, MeshesContainer, + my_sector_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_strain_energy() >>> op.inputs.sector_mesh.connect(my_sector_mesh) - >>> #or + >>> # or >>> op.inputs.sector_mesh(my_sector_mesh) - """ return self._sector_mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_strain_energy() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic @property def expanded_meshed_region(self): - """Allows to connect expanded_meshed_region input to the operator + """Allows to connect expanded_meshed_region input to the operator. - - pindoc: mesh expanded. + Mesh expanded. 
Parameters ---------- - my_expanded_meshed_region : MeshedRegion, MeshesContainer, + my_expanded_meshed_region : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_strain_energy() >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> #or + >>> # or >>> op.inputs.expanded_meshed_region(my_expanded_meshed_region) - """ return self._expanded_meshed_region @property def cyclic_support(self): - """Allows to connect cyclic_support input to the operator + """Allows to connect cyclic_support input to the operator. Parameters ---------- - my_cyclic_support : CyclicSupport, + my_cyclic_support : CyclicSupport Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_strain_energy() >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> #or + >>> # or >>> op.inputs.cyclic_support(my_cyclic_support) - """ return self._cyclic_support + class OutputsCyclicStrainEnergy(_Outputs): - """Intermediate class used to get outputs from cyclic_strain_energy operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.cyclic_strain_energy() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - >>> result_expanded_meshes = op.outputs.expanded_meshes() + """Intermediate class used to get outputs from + cyclic_strain_energy operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.cyclic_strain_energy() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() + >>> result_expanded_meshes = op.outputs.expanded_meshes() """ + def __init__(self, op: Operator): super().__init__(cyclic_strain_energy._spec().outputs, op) - self._fields_container = Output(cyclic_strain_energy._spec().output_pin(0), 0, op) + self._fields_container = Output( + cyclic_strain_energy._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) - self._expanded_meshes = Output(cyclic_strain_energy._spec().output_pin(1), 1, op) + self._expanded_meshes = Output( + cyclic_strain_energy._spec().output_pin(1), 1, op + ) self._outputs.append(self._expanded_meshes) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: FieldsContainer filled in - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_strain_energy() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container @property def expanded_meshes(self): """Allows to get expanded_meshes output of the operator - Returns ---------- - my_expanded_meshes : MeshesContainer, + my_expanded_meshes : MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_strain_energy() >>> # Connect inputs : op.inputs. ... 
- >>> result_expanded_meshes = op.outputs.expanded_meshes() - """ + >>> result_expanded_meshes = op.outputs.expanded_meshes() + """ # noqa: E501 return self._expanded_meshes - diff --git a/ansys/dpf/core/operators/result/cyclic_volume.py b/ansys/dpf/core/operators/result/cyclic_volume.py index 0447cdd93fe..acf61000a45 100644 --- a/ansys/dpf/core/operators/result/cyclic_volume.py +++ b/ansys/dpf/core/operators/result/cyclic_volume.py @@ -1,117 +1,239 @@ """ cyclic_volume -============= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class cyclic_volume(Operator): """Read mapdl::rst::ENG_VOL from an rst file. 
- available inputs: - - time_scoping (Scoping, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping, list) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - sector_mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - expanded_meshed_region (MeshedRegion, MeshesContainer) (optional) - - cyclic_support (CyclicSupport) (optional) - - available outputs: - - fields_container (FieldsContainer) - - expanded_meshes (MeshesContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.cyclic_volume() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_sector_mesh = dpf.MeshedRegion() - >>> op.inputs.sector_mesh.connect(my_sector_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> my_expanded_meshed_region = dpf.MeshedRegion() - >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> my_cyclic_support = dpf.CyclicSupport() - >>> op.inputs.cyclic_support.connect(my_cyclic_support) - - >>> # Instantiate operator and connect inputs in one line - >>> op = 
dpf.operators.result.cyclic_volume(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,fields_container=my_fields_container,streams_container=my_streams_container,data_sources=my_data_sources,bool_rotate_to_global=my_bool_rotate_to_global,sector_mesh=my_sector_mesh,read_cyclic=my_read_cyclic,expanded_meshed_region=my_expanded_meshed_region,cyclic_support=my_cyclic_support) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container() - >>> result_expanded_meshes = op.outputs.expanded_meshes()""" - def __init__(self, time_scoping=None, mesh_scoping=None, fields_container=None, streams_container=None, data_sources=None, bool_rotate_to_global=None, sector_mesh=None, read_cyclic=None, expanded_meshed_region=None, cyclic_support=None, config=None, server=None): - super().__init__(name="mapdl::rst::ENG_VOL_cyclic", config = config, server = server) + Parameters + ---------- + time_scoping : Scoping, optional + mesh_scoping : ScopingsContainer or Scoping, optional + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer or Stream, optional + Streams containing the result file. + data_sources : DataSources + Data sources containing the result file. + bool_rotate_to_global : bool, optional + If true the field is roated to global + coordinate system (default true) + sector_mesh : MeshedRegion or MeshesContainer, optional + Mesh of the base sector (can be a skin). + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + expanded_meshed_region : MeshedRegion or MeshesContainer, optional + Mesh expanded. 
+ cyclic_support : CyclicSupport, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.cyclic_volume() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_sector_mesh = dpf.MeshedRegion() + >>> op.inputs.sector_mesh.connect(my_sector_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_expanded_meshed_region = dpf.MeshedRegion() + >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) + >>> my_cyclic_support = dpf.CyclicSupport() + >>> op.inputs.cyclic_support.connect(my_cyclic_support) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.cyclic_volume( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... sector_mesh=my_sector_mesh, + ... read_cyclic=my_read_cyclic, + ... expanded_meshed_region=my_expanded_meshed_region, + ... cyclic_support=my_cyclic_support, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + >>> result_expanded_meshes = op.outputs.expanded_meshes() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + sector_mesh=None, + read_cyclic=None, + expanded_meshed_region=None, + cyclic_support=None, + config=None, + server=None, + ): + super().__init__( + name="mapdl::rst::ENG_VOL_cyclic", config=config, server=server + ) self._inputs = InputsCyclicVolume(self) self._outputs = OutputsCyclicVolume(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if bool_rotate_to_global !=None: + if bool_rotate_to_global is not None: self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) - if sector_mesh !=None: + if sector_mesh is not None: self.inputs.sector_mesh.connect(sector_mesh) - if read_cyclic !=None: + if read_cyclic is not None: self.inputs.read_cyclic.connect(read_cyclic) - if expanded_meshed_region !=None: + if expanded_meshed_region is not None: self.inputs.expanded_meshed_region.connect(expanded_meshed_region) - if cyclic_support !=None: + if cyclic_support is not None: self.inputs.cyclic_support.connect(cyclic_support) @staticmethod def _spec(): - spec = Specification(description="""Read mapdl::rst::ENG_VOL from an rst file.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","vector"], 
optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping","vector"], optional=True, document=""""""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], optional=True, document="""Streams containing the result file."""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""data sources containing the result file."""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is roated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "sector_mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""mesh of the base sector (can be a skin)."""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)"""), - 15 : PinSpecification(name = "expanded_meshed_region", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""mesh expanded."""), - 16 : PinSpecification(name = "cyclic_support", type_names=["cyclic_support"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""FieldsContainer filled in"""), - 1 : PinSpecification(name = "expanded_meshes", type_names=["meshes_container"], optional=False, document="""""")}) + description = """Read mapdl::rst::ENG_VOL from an rst file.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + 
name="time_scoping", + type_names=["scoping", "vector"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping", "vector"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container", "stream"], + optional=True, + document="""Streams containing the result file.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Data sources containing the result file.""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is roated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="sector_mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Mesh of the base sector (can be a skin).""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 15: PinSpecification( + name="expanded_meshed_region", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Mesh expanded.""", + ), + 16: PinSpecification( + name="cyclic_support", + type_names=["cyclic_support"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Fieldscontainer filled in""", + ), + 1: PinSpecification( + name="expanded_meshes", + 
type_names=["meshes_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mapdl::rst::ENG_VOL_cyclic") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="mapdl::rst::ENG_VOL_cyclic", server=server) @property def inputs(self): @@ -119,345 +241,329 @@ def inputs(self): Returns -------- - inputs : InputsCyclicVolume + inputs : InputsCyclicVolume """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCyclicVolume + outputs : OutputsCyclicVolume """ return super().outputs -#internal name: mapdl::rst::ENG_VOL_cyclic -#scripting name: cyclic_volume class InputsCyclicVolume(_Inputs): - """Intermediate class used to connect user inputs to cyclic_volume operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.cyclic_volume() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> 
op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_sector_mesh = dpf.MeshedRegion() - >>> op.inputs.sector_mesh.connect(my_sector_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> my_expanded_meshed_region = dpf.MeshedRegion() - >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> my_cyclic_support = dpf.CyclicSupport() - >>> op.inputs.cyclic_support.connect(my_cyclic_support) + """Intermediate class used to connect user inputs to + cyclic_volume operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.cyclic_volume() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_sector_mesh = dpf.MeshedRegion() + >>> op.inputs.sector_mesh.connect(my_sector_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_expanded_meshed_region = dpf.MeshedRegion() + >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) + >>> my_cyclic_support = dpf.CyclicSupport() + >>> op.inputs.cyclic_support.connect(my_cyclic_support) """ + def __init__(self, op: Operator): super().__init__(cyclic_volume._spec().inputs, op) - self._time_scoping = Input(cyclic_volume._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(cyclic_volume._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - 
self._mesh_scoping = Input(cyclic_volume._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(cyclic_volume._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(cyclic_volume._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(cyclic_volume._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(cyclic_volume._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(cyclic_volume._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(cyclic_volume._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(cyclic_volume._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(cyclic_volume._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + cyclic_volume._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._sector_mesh = Input(cyclic_volume._spec().input_pin(7), 7, op, -1) + self._sector_mesh = Input(cyclic_volume._spec().input_pin(7), 7, op, -1) self._inputs.append(self._sector_mesh) - self._read_cyclic = Input(cyclic_volume._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(cyclic_volume._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._expanded_meshed_region = Input(cyclic_volume._spec().input_pin(15), 15, op, -1) + self._expanded_meshed_region = Input( + cyclic_volume._spec().input_pin(15), 15, op, -1 + ) self._inputs.append(self._expanded_meshed_region) - self._cyclic_support = Input(cyclic_volume._spec().input_pin(16), 16, op, -1) + self._cyclic_support = Input(cyclic_volume._spec().input_pin(16), 16, op, -1) self._inputs.append(self._cyclic_support) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
Parameters ---------- - my_time_scoping : Scoping, list, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_volume() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, list, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_volume() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_volume() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: Streams containing the result file. + Streams containing the result file. 
Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_volume() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: data sources containing the result file. + Data sources containing the result file. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_volume() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is roated to global coordinate system (default true) + If true the field is roated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_volume() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def sector_mesh(self): - """Allows to connect sector_mesh input to the operator + """Allows to connect sector_mesh input to the operator. - - pindoc: mesh of the base sector (can be a skin). + Mesh of the base sector (can be a skin). 
Parameters ---------- - my_sector_mesh : MeshedRegion, MeshesContainer, + my_sector_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_volume() >>> op.inputs.sector_mesh.connect(my_sector_mesh) - >>> #or + >>> # or >>> op.inputs.sector_mesh(my_sector_mesh) - """ return self._sector_mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_volume() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic @property def expanded_meshed_region(self): - """Allows to connect expanded_meshed_region input to the operator + """Allows to connect expanded_meshed_region input to the operator. - - pindoc: mesh expanded. + Mesh expanded. 
Parameters ---------- - my_expanded_meshed_region : MeshedRegion, MeshesContainer, + my_expanded_meshed_region : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_volume() >>> op.inputs.expanded_meshed_region.connect(my_expanded_meshed_region) - >>> #or + >>> # or >>> op.inputs.expanded_meshed_region(my_expanded_meshed_region) - """ return self._expanded_meshed_region @property def cyclic_support(self): - """Allows to connect cyclic_support input to the operator + """Allows to connect cyclic_support input to the operator. Parameters ---------- - my_cyclic_support : CyclicSupport, + my_cyclic_support : CyclicSupport Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_volume() >>> op.inputs.cyclic_support.connect(my_cyclic_support) - >>> #or + >>> # or >>> op.inputs.cyclic_support(my_cyclic_support) - """ return self._cyclic_support + class OutputsCyclicVolume(_Outputs): - """Intermediate class used to get outputs from cyclic_volume operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.cyclic_volume() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - >>> result_expanded_meshes = op.outputs.expanded_meshes() + """Intermediate class used to get outputs from + cyclic_volume operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.cyclic_volume() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() + >>> result_expanded_meshes = op.outputs.expanded_meshes() """ + def __init__(self, op: Operator): super().__init__(cyclic_volume._spec().outputs, op) - self._fields_container = Output(cyclic_volume._spec().output_pin(0), 0, op) + self._fields_container = Output(cyclic_volume._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) - self._expanded_meshes = Output(cyclic_volume._spec().output_pin(1), 1, op) + self._expanded_meshes = Output(cyclic_volume._spec().output_pin(1), 1, op) self._outputs.append(self._expanded_meshes) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: FieldsContainer filled in - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_volume() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container @property def expanded_meshes(self): """Allows to get expanded_meshes output of the operator - Returns ---------- - my_expanded_meshes : MeshesContainer, + my_expanded_meshes : MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.cyclic_volume() >>> # Connect inputs : op.inputs. ... 
- >>> result_expanded_meshes = op.outputs.expanded_meshes() - """ + >>> result_expanded_meshes = op.outputs.expanded_meshes() + """ # noqa: E501 return self._expanded_meshes - diff --git a/ansys/dpf/core/operators/result/displacement.py b/ansys/dpf/core/operators/result/displacement.py index 3f42a94a7f5..3d088e1618e 100644 --- a/ansys/dpf/core/operators/result/displacement.py +++ b/ansys/dpf/core/operators/result/displacement.py @@ -1,92 +1,238 @@ """ displacement -============ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class displacement(Operator): - """Read/compute nodal displacements by calling the readers defined by the datasources. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.displacement() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.displacement(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="U", config = config, server = server) + """Read/compute nodal displacements by calling the readers defined by the + datasources. 
+ + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.displacement() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> 
op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.displacement( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="U", config=config, server=server) self._inputs = InputsDisplacement(self) self._outputs = OutputsDisplacement(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute nodal displacements by calling the readers defined by the 
datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute nodal displacements by calling the 
readers defined by the + datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and 
stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "U") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="U", server=server) @property def inputs(self): @@ -94,275 +240,277 @@ def inputs(self): Returns -------- - inputs : InputsDisplacement + inputs : InputsDisplacement """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsDisplacement + outputs : OutputsDisplacement """ return super().outputs -#internal name: U -#scripting name: displacement class InputsDisplacement(_Inputs): - """Intermediate class used to connect user inputs to displacement operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.displacement() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - 
>>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + displacement operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.displacement() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(displacement._spec().inputs, op) - self._time_scoping = Input(displacement._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(displacement._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(displacement._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(displacement._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(displacement._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(displacement._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(displacement._spec().input_pin(3), 3, op, -1) + 
self._streams_container = Input(displacement._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(displacement._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(displacement._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(displacement._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + displacement._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(displacement._spec().input_pin(7), 7, op, -1) + self._mesh = Input(displacement._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(displacement._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(displacement._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. 
The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
- - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsDisplacement(_Outputs): - """Intermediate class used to get outputs from displacement operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.displacement() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + displacement operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.displacement() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(displacement._spec().outputs, op) - self._fields_container = Output(displacement._spec().output_pin(0), 0, op) + self._fields_container = Output(displacement._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/displacement_X.py b/ansys/dpf/core/operators/result/displacement_X.py index 20eaf3d98cb..da906aa8c02 100644 --- a/ansys/dpf/core/operators/result/displacement_X.py +++ b/ansys/dpf/core/operators/result/displacement_X.py @@ -1,92 +1,239 @@ """ displacement_X -============== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class displacement_X(Operator): - """Read/compute nodal displacements X component of the vector (1st component) by calling the readers defined by the datasources. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.displacement_X() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.displacement_X(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="UX", config = config, server = server) + """Read/compute nodal displacements X component of the vector (1st + component) by calling the readers 
defined by the datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.displacement_X() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> 
my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.displacement_X( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="UX", config=config, server=server) self._inputs = InputsDisplacementX(self) self._outputs = OutputsDisplacementX(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute nodal displacements X 
component of the vector (1st component) by calling the readers defined by the datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, 
document="""""")}) + description = """Read/compute nodal displacements X component of the vector (1st + component) by calling the readers defined by the + datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 
cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "UX") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="UX", server=server) @property def inputs(self): @@ -94,275 +241,277 @@ def inputs(self): Returns -------- - inputs : InputsDisplacementX + inputs : InputsDisplacementX """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsDisplacementX + outputs : OutputsDisplacementX """ return super().outputs -#internal name: UX -#scripting name: displacement_X class InputsDisplacementX(_Inputs): - """Intermediate class used to connect user inputs to displacement_X operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.displacement_X() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> 
op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + displacement_X operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.displacement_X() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(displacement_X._spec().inputs, op) - self._time_scoping = Input(displacement_X._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(displacement_X._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(displacement_X._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(displacement_X._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(displacement_X._spec().input_pin(2), 2, op, -1) + self._fields_container = 
Input(displacement_X._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(displacement_X._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(displacement_X._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(displacement_X._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(displacement_X._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(displacement_X._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + displacement_X._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(displacement_X._spec().input_pin(7), 7, op, -1) + self._mesh = Input(displacement_X._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(displacement_X._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(displacement_X._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_X() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_X() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_X() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_X() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_X() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_X() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_X() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_X() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsDisplacementX(_Outputs): - """Intermediate class used to get outputs from displacement_X operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.displacement_X() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + displacement_X operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.displacement_X() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(displacement_X._spec().outputs, op) - self._fields_container = Output(displacement_X._spec().output_pin(0), 0, op) + self._fields_container = Output(displacement_X._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_X() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/displacement_Y.py b/ansys/dpf/core/operators/result/displacement_Y.py index 5dbb23cca6f..2e0b88e05fe 100644 --- a/ansys/dpf/core/operators/result/displacement_Y.py +++ b/ansys/dpf/core/operators/result/displacement_Y.py @@ -1,92 +1,239 @@ """ displacement_Y -============== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class displacement_Y(Operator): - """Read/compute nodal displacements Y component of the vector (2nd component) by calling the readers defined by the datasources. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.displacement_Y() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.displacement_Y(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="UY", config = config, server = server) + """Read/compute nodal displacements Y component of the vector (2nd + component) by calling the readers 
defined by the datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.displacement_Y() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> 
my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.displacement_Y( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="UY", config=config, server=server) self._inputs = InputsDisplacementY(self) self._outputs = OutputsDisplacementY(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute nodal displacements Y 
component of the vector (2nd component) by calling the readers defined by the datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, 
document="""""")}) + description = """Read/compute nodal displacements Y component of the vector (2nd + component) by calling the readers defined by the + datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 
cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "UY") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="UY", server=server) @property def inputs(self): @@ -94,275 +241,277 @@ def inputs(self): Returns -------- - inputs : InputsDisplacementY + inputs : InputsDisplacementY """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsDisplacementY + outputs : OutputsDisplacementY """ return super().outputs -#internal name: UY -#scripting name: displacement_Y class InputsDisplacementY(_Inputs): - """Intermediate class used to connect user inputs to displacement_Y operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.displacement_Y() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> 
op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + displacement_Y operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.displacement_Y() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(displacement_Y._spec().inputs, op) - self._time_scoping = Input(displacement_Y._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(displacement_Y._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(displacement_Y._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(displacement_Y._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(displacement_Y._spec().input_pin(2), 2, op, -1) + self._fields_container = 
Input(displacement_Y._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(displacement_Y._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(displacement_Y._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(displacement_Y._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(displacement_Y._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(displacement_Y._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + displacement_Y._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(displacement_Y._spec().input_pin(7), 7, op, -1) + self._mesh = Input(displacement_Y._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(displacement_Y._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(displacement_Y._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_Y() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_Y() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_Y() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_Y() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_Y() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_Y() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_Y() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_Y() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsDisplacementY(_Outputs): - """Intermediate class used to get outputs from displacement_Y operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.displacement_Y() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + displacement_Y operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.displacement_Y() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(displacement_Y._spec().outputs, op) - self._fields_container = Output(displacement_Y._spec().output_pin(0), 0, op) + self._fields_container = Output(displacement_Y._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_Y() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/displacement_Z.py b/ansys/dpf/core/operators/result/displacement_Z.py index 535e200fa91..e75e1bc866b 100644 --- a/ansys/dpf/core/operators/result/displacement_Z.py +++ b/ansys/dpf/core/operators/result/displacement_Z.py @@ -1,92 +1,239 @@ """ displacement_Z -============== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class displacement_Z(Operator): - """Read/compute nodal displacements Z component of the vector (3rd component) by calling the readers defined by the datasources. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.displacement_Z() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.displacement_Z(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="UZ", config = config, server = server) + """Read/compute nodal displacements Z component of the vector (3rd + component) by calling the readers 
defined by the datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.displacement_Z() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> 
my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.displacement_Z( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="UZ", config=config, server=server) self._inputs = InputsDisplacementZ(self) self._outputs = OutputsDisplacementZ(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute nodal displacements Z 
component of the vector (3rd component) by calling the readers defined by the datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, 
document="""""")}) + description = """Read/compute nodal displacements Z component of the vector (3rd + component) by calling the readers defined by the + datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 
cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "UZ") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="UZ", server=server) @property def inputs(self): @@ -94,275 +241,277 @@ def inputs(self): Returns -------- - inputs : InputsDisplacementZ + inputs : InputsDisplacementZ """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsDisplacementZ + outputs : OutputsDisplacementZ """ return super().outputs -#internal name: UZ -#scripting name: displacement_Z class InputsDisplacementZ(_Inputs): - """Intermediate class used to connect user inputs to displacement_Z operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.displacement_Z() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> 
op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + displacement_Z operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.displacement_Z() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(displacement_Z._spec().inputs, op) - self._time_scoping = Input(displacement_Z._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(displacement_Z._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(displacement_Z._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(displacement_Z._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(displacement_Z._spec().input_pin(2), 2, op, -1) + self._fields_container = 
Input(displacement_Z._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(displacement_Z._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(displacement_Z._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(displacement_Z._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(displacement_Z._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(displacement_Z._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + displacement_Z._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(displacement_Z._spec().input_pin(7), 7, op, -1) + self._mesh = Input(displacement_Z._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(displacement_Z._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(displacement_Z._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_Z() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_Z() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_Z() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_Z() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_Z() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_Z() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_Z() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_Z() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsDisplacementZ(_Outputs): - """Intermediate class used to get outputs from displacement_Z operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.displacement_Z() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + displacement_Z operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.displacement_Z() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(displacement_Z._spec().outputs, op) - self._fields_container = Output(displacement_Z._spec().output_pin(0), 0, op) + self._fields_container = Output(displacement_Z._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.displacement_Z() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/elastic_strain.py b/ansys/dpf/core/operators/result/elastic_strain.py index 3f1466d79e8..8e9cf73db0f 100644 --- a/ansys/dpf/core/operators/result/elastic_strain.py +++ b/ansys/dpf/core/operators/result/elastic_strain.py @@ -1,98 +1,274 @@ """ elastic_strain -============== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class elastic_strain(Operator): - """Read/compute element nodal component elastic strains by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.elastic_strain() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.elastic_strain(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="EPEL", config = config, server = server) + """Read/compute element nodal component elastic strains by calling the + readers defined by the datasources. Regarding the requested + location and the input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.elastic_strain() + + >>> # Make input connections + >>> 
my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.elastic_strain( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="EPEL", config=config, server=server) self._inputs = InputsElasticStrain(self) self._outputs = OutputsElasticStrain(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component elastic strains by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component elastic strains by calling the + readers defined by the datasources. Regarding the + requested location and the input mesh scoping, the result + location can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "EPEL") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="EPEL", server=server) @property def inputs(self): @@ -100,301 +276,327 @@ def inputs(self): Returns -------- - inputs : InputsElasticStrain + inputs : InputsElasticStrain """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElasticStrain + outputs : OutputsElasticStrain """ return super().outputs -#internal name: EPEL -#scripting name: elastic_strain class InputsElasticStrain(_Inputs): - """Intermediate class used to connect user inputs to elastic_strain operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.elastic_strain() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> 
my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + elastic_strain operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(elastic_strain._spec().inputs, op) - self._time_scoping = Input(elastic_strain._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(elastic_strain._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(elastic_strain._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(elastic_strain._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(elastic_strain._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(elastic_strain._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(elastic_strain._spec().input_pin(3), 3, op, 
-1) + self._streams_container = Input(elastic_strain._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(elastic_strain._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(elastic_strain._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(elastic_strain._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + elastic_strain._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elastic_strain._spec().input_pin(7), 7, op, -1) + self._mesh = Input(elastic_strain._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(elastic_strain._spec().input_pin(9), 9, op, -1) + self._requested_location = Input(elastic_strain._spec().input_pin(9), 9, op, -1) self._inputs.append(self._requested_location) - self._read_cyclic = Input(elastic_strain._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(elastic_strain._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) + self._read_beams = Input(elastic_strain._spec().input_pin(21), 21, op, -1) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. + + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsElasticStrain(_Outputs): - """Intermediate class used to get outputs from elastic_strain operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.elastic_strain() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + elastic_strain operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(elastic_strain._spec().outputs, op) - self._fields_container = Output(elastic_strain._spec().output_pin(0), 0, op) + self._fields_container = Output(elastic_strain._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/elastic_strain_X.py b/ansys/dpf/core/operators/result/elastic_strain_X.py index 06516edf9b5..15024173e55 100644 --- a/ansys/dpf/core/operators/result/elastic_strain_X.py +++ b/ansys/dpf/core/operators/result/elastic_strain_X.py @@ -1,98 +1,258 @@ """ elastic_strain_X -================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class elastic_strain_X(Operator): - """Read/compute element nodal component elastic strains XX normal component (00 component) by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.elastic_strain_X() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.elastic_strain_X(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="EPELX", config = config, server = server) + """Read/compute element nodal component elastic strains XX normal + component (00 component) by calling the readers defined by the + datasources. Regarding the requested location and the input mesh + scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location, default is nodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.elastic_strain_X() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> 
my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.elastic_strain_X( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="EPELX", config=config, server=server) self._inputs = InputsElasticStrainX(self) self._outputs = OutputsElasticStrainX(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component elastic strains XX normal component (00 component) by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location, default is Nodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done 
and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component elastic strains XX normal + component (00 component) by calling the readers defined by + the datasources. Regarding the requested location and the + input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location, default is nodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "EPELX") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="EPELX", server=server) @property def inputs(self): @@ -100,301 +260,305 @@ def inputs(self): Returns -------- - inputs : InputsElasticStrainX + inputs : InputsElasticStrainX """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElasticStrainX + outputs : OutputsElasticStrainX """ return super().outputs -#internal name: EPELX -#scripting name: elastic_strain_X class InputsElasticStrainX(_Inputs): - """Intermediate class used to connect user inputs to elastic_strain_X operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.elastic_strain_X() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + elastic_strain_X operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain_X() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(elastic_strain_X._spec().inputs, op) - self._time_scoping = Input(elastic_strain_X._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(elastic_strain_X._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(elastic_strain_X._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(elastic_strain_X._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(elastic_strain_X._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(elastic_strain_X._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(elastic_strain_X._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + elastic_strain_X._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(elastic_strain_X._spec().input_pin(4), 4, op, -1) 
+ self._data_sources = Input(elastic_strain_X._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(elastic_strain_X._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + elastic_strain_X._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elastic_strain_X._spec().input_pin(7), 7, op, -1) + self._mesh = Input(elastic_strain_X._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(elastic_strain_X._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + elastic_strain_X._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(elastic_strain_X._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(elastic_strain_X._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_X() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. 
- - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_X() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_X() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_X() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_X() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_X() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_X() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location, default is Nodal + Requested location, default is nodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_X() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_X() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsElasticStrainX(_Outputs): - """Intermediate class used to get outputs from elastic_strain_X operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.elastic_strain_X() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + elastic_strain_X operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain_X() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(elastic_strain_X._spec().outputs, op) - self._fields_container = Output(elastic_strain_X._spec().output_pin(0), 0, op) + self._fields_container = Output(elastic_strain_X._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_X() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/elastic_strain_XY.py b/ansys/dpf/core/operators/result/elastic_strain_XY.py index 87c4e5e5448..e4bd175698d 100644 --- a/ansys/dpf/core/operators/result/elastic_strain_XY.py +++ b/ansys/dpf/core/operators/result/elastic_strain_XY.py @@ -1,98 +1,258 @@ """ elastic_strain_XY -================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class elastic_strain_XY(Operator): - """Read/compute element nodal component elastic strains XY shear component (01 component) by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.elastic_strain_XY() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.elastic_strain_XY(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="EPELXY", config = config, server = server) - self._inputs = InputsElasticStrainXY(self) - self._outputs = OutputsElasticStrainXY(self) - if time_scoping !=None: + """Read/compute element nodal component elastic strains XY shear + component (01 component) by calling the readers defined by the + datasources. Regarding the requested location and the input mesh + scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location, default is nodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.elastic_strain_XY() + + >>> # Make 
input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.elastic_strain_XY( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="EPELXY", config=config, server=server) + self._inputs = InputsElasticStrainXy(self) + self._outputs = OutputsElasticStrainXy(self) + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component elastic strains XY shear component (01 component) by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location, default is Nodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done 
and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component elastic strains XY shear + component (01 component) by calling the readers defined by + the datasources. Regarding the requested location and the + input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location, default is nodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "EPELXY") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="EPELXY", server=server) @property def inputs(self): @@ -100,301 +260,307 @@ def inputs(self): Returns -------- - inputs : InputsElasticStrainXY + inputs : InputsElasticStrainXy """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElasticStrainXY + outputs : OutputsElasticStrainXy """ return super().outputs -#internal name: EPELXY -#scripting name: elastic_strain_XY -class InputsElasticStrainXY(_Inputs): - """Intermediate class used to connect user inputs to elastic_strain_XY operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.elastic_strain_XY() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) +class InputsElasticStrainXy(_Inputs): + """Intermediate class used to connect 
user inputs to + elastic_strain_XY operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain_XY() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(elastic_strain_XY._spec().inputs, op) - self._time_scoping = Input(elastic_strain_XY._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(elastic_strain_XY._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(elastic_strain_XY._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(elastic_strain_XY._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(elastic_strain_XY._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + elastic_strain_XY._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(elastic_strain_XY._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + elastic_strain_XY._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - 
self._data_sources = Input(elastic_strain_XY._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(elastic_strain_XY._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(elastic_strain_XY._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + elastic_strain_XY._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elastic_strain_XY._spec().input_pin(7), 7, op, -1) + self._mesh = Input(elastic_strain_XY._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(elastic_strain_XY._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + elastic_strain_XY._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(elastic_strain_XY._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(elastic_strain_XY._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_XY() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_XY() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_XY() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_XY() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_XY() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_XY() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_XY() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location, default is Nodal + Requested location, default is nodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_XY() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_XY() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic -class OutputsElasticStrainXY(_Outputs): - """Intermediate class used to get outputs from elastic_strain_XY operator - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_XY() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() +class OutputsElasticStrainXy(_Outputs): + """Intermediate class used to get outputs from + elastic_strain_XY operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain_XY() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(elastic_strain_XY._spec().outputs, op) - self._fields_container = Output(elastic_strain_XY._spec().output_pin(0), 0, op) + self._fields_container = Output(elastic_strain_XY._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_XY() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/elastic_strain_XZ.py b/ansys/dpf/core/operators/result/elastic_strain_XZ.py index cb27189c9b0..084f994ff11 100644 --- a/ansys/dpf/core/operators/result/elastic_strain_XZ.py +++ b/ansys/dpf/core/operators/result/elastic_strain_XZ.py @@ -1,98 +1,258 @@ """ elastic_strain_XZ -================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class elastic_strain_XZ(Operator): - """Read/compute element nodal component elastic strains XZ shear component (02 component) by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.elastic_strain_XZ() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.elastic_strain_XZ(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="EPELXZ", config = config, server = server) - self._inputs = InputsElasticStrainXZ(self) - self._outputs = OutputsElasticStrainXZ(self) - if time_scoping !=None: + """Read/compute element nodal component elastic strains XZ shear + component (02 component) by calling the readers defined by the + datasources. Regarding the requested location and the input mesh + scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location, default is nodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.elastic_strain_XZ() + + >>> # Make 
input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.elastic_strain_XZ( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="EPELXZ", config=config, server=server) + self._inputs = InputsElasticStrainXz(self) + self._outputs = OutputsElasticStrainXz(self) + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component elastic strains XZ shear component (02 component) by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location, default is Nodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done 
and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component elastic strains XZ shear + component (02 component) by calling the readers defined by + the datasources. Regarding the requested location and the + input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location, default is nodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "EPELXZ") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="EPELXZ", server=server) @property def inputs(self): @@ -100,301 +260,307 @@ def inputs(self): Returns -------- - inputs : InputsElasticStrainXZ + inputs : InputsElasticStrainXz """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElasticStrainXZ + outputs : OutputsElasticStrainXz """ return super().outputs -#internal name: EPELXZ -#scripting name: elastic_strain_XZ -class InputsElasticStrainXZ(_Inputs): - """Intermediate class used to connect user inputs to elastic_strain_XZ operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.elastic_strain_XZ() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) +class InputsElasticStrainXz(_Inputs): + """Intermediate class used to connect 
user inputs to + elastic_strain_XZ operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain_XZ() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(elastic_strain_XZ._spec().inputs, op) - self._time_scoping = Input(elastic_strain_XZ._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(elastic_strain_XZ._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(elastic_strain_XZ._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(elastic_strain_XZ._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(elastic_strain_XZ._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + elastic_strain_XZ._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(elastic_strain_XZ._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + elastic_strain_XZ._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - 
self._data_sources = Input(elastic_strain_XZ._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(elastic_strain_XZ._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(elastic_strain_XZ._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + elastic_strain_XZ._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elastic_strain_XZ._spec().input_pin(7), 7, op, -1) + self._mesh = Input(elastic_strain_XZ._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(elastic_strain_XZ._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + elastic_strain_XZ._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(elastic_strain_XZ._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(elastic_strain_XZ._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_XZ() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_XZ() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_XZ() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_XZ() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_XZ() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_XZ() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_XZ() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location, default is Nodal + Requested location, default is nodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_XZ() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_XZ() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic -class OutputsElasticStrainXZ(_Outputs): - """Intermediate class used to get outputs from elastic_strain_XZ operator - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_XZ() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() +class OutputsElasticStrainXz(_Outputs): + """Intermediate class used to get outputs from + elastic_strain_XZ operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain_XZ() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(elastic_strain_XZ._spec().outputs, op) - self._fields_container = Output(elastic_strain_XZ._spec().output_pin(0), 0, op) + self._fields_container = Output(elastic_strain_XZ._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_XZ() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/elastic_strain_Y.py b/ansys/dpf/core/operators/result/elastic_strain_Y.py index f3a3f697b3f..cbd27e24a26 100644 --- a/ansys/dpf/core/operators/result/elastic_strain_Y.py +++ b/ansys/dpf/core/operators/result/elastic_strain_Y.py @@ -1,98 +1,258 @@ """ elastic_strain_Y -================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class elastic_strain_Y(Operator): - """Read/compute element nodal component elastic strains YY normal component (11 component) by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.elastic_strain_Y() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.elastic_strain_Y(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="EPELY", config = config, server = server) + """Read/compute element nodal component elastic strains YY normal + component (11 component) by calling the readers defined by the + datasources. Regarding the requested location and the input mesh + scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location, default is nodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.elastic_strain_Y() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> 
my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.elastic_strain_Y( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="EPELY", config=config, server=server) self._inputs = InputsElasticStrainY(self) self._outputs = OutputsElasticStrainY(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component elastic strains YY normal component (11 component) by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location, default is Nodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done 
and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component elastic strains YY normal + component (11 component) by calling the readers defined by + the datasources. Regarding the requested location and the + input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location, default is nodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "EPELY") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="EPELY", server=server) @property def inputs(self): @@ -100,301 +260,305 @@ def inputs(self): Returns -------- - inputs : InputsElasticStrainY + inputs : InputsElasticStrainY """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElasticStrainY + outputs : OutputsElasticStrainY """ return super().outputs -#internal name: EPELY -#scripting name: elastic_strain_Y class InputsElasticStrainY(_Inputs): - """Intermediate class used to connect user inputs to elastic_strain_Y operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.elastic_strain_Y() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + elastic_strain_Y operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain_Y() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(elastic_strain_Y._spec().inputs, op) - self._time_scoping = Input(elastic_strain_Y._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(elastic_strain_Y._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(elastic_strain_Y._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(elastic_strain_Y._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(elastic_strain_Y._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(elastic_strain_Y._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(elastic_strain_Y._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + elastic_strain_Y._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(elastic_strain_Y._spec().input_pin(4), 4, op, -1) 
+ self._data_sources = Input(elastic_strain_Y._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(elastic_strain_Y._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + elastic_strain_Y._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elastic_strain_Y._spec().input_pin(7), 7, op, -1) + self._mesh = Input(elastic_strain_Y._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(elastic_strain_Y._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + elastic_strain_Y._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(elastic_strain_Y._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(elastic_strain_Y._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_Y() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. 
- - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_Y() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_Y() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_Y() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_Y() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_Y() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_Y() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location, default is Nodal + Requested location, default is nodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_Y() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_Y() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsElasticStrainY(_Outputs): - """Intermediate class used to get outputs from elastic_strain_Y operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.elastic_strain_Y() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + elastic_strain_Y operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain_Y() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(elastic_strain_Y._spec().outputs, op) - self._fields_container = Output(elastic_strain_Y._spec().output_pin(0), 0, op) + self._fields_container = Output(elastic_strain_Y._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_Y() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/elastic_strain_YZ.py b/ansys/dpf/core/operators/result/elastic_strain_YZ.py index 8d3ed69e454..bdbb375f97f 100644 --- a/ansys/dpf/core/operators/result/elastic_strain_YZ.py +++ b/ansys/dpf/core/operators/result/elastic_strain_YZ.py @@ -1,98 +1,258 @@ """ elastic_strain_YZ -================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class elastic_strain_YZ(Operator): - """Read/compute element nodal component elastic strains YZ shear component (12 component) by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.elastic_strain_YZ() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.elastic_strain_YZ(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="EPELYZ", config = config, server = server) - self._inputs = InputsElasticStrainYZ(self) - self._outputs = OutputsElasticStrainYZ(self) - if time_scoping !=None: + """Read/compute element nodal component elastic strains YZ shear + component (12 component) by calling the readers defined by the + datasources. Regarding the requested location and the input mesh + scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location, default is nodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.elastic_strain_YZ() + + >>> # Make 
input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.elastic_strain_YZ( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="EPELYZ", config=config, server=server) + self._inputs = InputsElasticStrainYz(self) + self._outputs = OutputsElasticStrainYz(self) + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component elastic strains YZ shear component (12 component) by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location, default is Nodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done 
and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component elastic strains YZ shear + component (12 component) by calling the readers defined by + the datasources. Regarding the requested location and the + input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location, default is nodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "EPELYZ") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="EPELYZ", server=server) @property def inputs(self): @@ -100,301 +260,307 @@ def inputs(self): Returns -------- - inputs : InputsElasticStrainYZ + inputs : InputsElasticStrainYz """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElasticStrainYZ + outputs : OutputsElasticStrainYz """ return super().outputs -#internal name: EPELYZ -#scripting name: elastic_strain_YZ -class InputsElasticStrainYZ(_Inputs): - """Intermediate class used to connect user inputs to elastic_strain_YZ operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.elastic_strain_YZ() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) +class InputsElasticStrainYz(_Inputs): + """Intermediate class used to connect 
user inputs to + elastic_strain_YZ operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain_YZ() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(elastic_strain_YZ._spec().inputs, op) - self._time_scoping = Input(elastic_strain_YZ._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(elastic_strain_YZ._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(elastic_strain_YZ._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(elastic_strain_YZ._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(elastic_strain_YZ._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + elastic_strain_YZ._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(elastic_strain_YZ._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + elastic_strain_YZ._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - 
self._data_sources = Input(elastic_strain_YZ._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(elastic_strain_YZ._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(elastic_strain_YZ._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + elastic_strain_YZ._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elastic_strain_YZ._spec().input_pin(7), 7, op, -1) + self._mesh = Input(elastic_strain_YZ._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(elastic_strain_YZ._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + elastic_strain_YZ._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(elastic_strain_YZ._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(elastic_strain_YZ._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_YZ() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_YZ() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_YZ() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_YZ() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_YZ() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_YZ() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_YZ() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location, default is Nodal + Requested location, default is nodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_YZ() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_YZ() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic -class OutputsElasticStrainYZ(_Outputs): - """Intermediate class used to get outputs from elastic_strain_YZ operator - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_YZ() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() +class OutputsElasticStrainYz(_Outputs): + """Intermediate class used to get outputs from + elastic_strain_YZ operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain_YZ() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(elastic_strain_YZ._spec().outputs, op) - self._fields_container = Output(elastic_strain_YZ._spec().output_pin(0), 0, op) + self._fields_container = Output(elastic_strain_YZ._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_YZ() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/elastic_strain_Z.py b/ansys/dpf/core/operators/result/elastic_strain_Z.py index 2bf223d6f69..b545b855654 100644 --- a/ansys/dpf/core/operators/result/elastic_strain_Z.py +++ b/ansys/dpf/core/operators/result/elastic_strain_Z.py @@ -1,98 +1,258 @@ """ elastic_strain_Z -================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class elastic_strain_Z(Operator): - """Read/compute element nodal component elastic strains ZZ normal component (22 component) by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.elastic_strain_Z() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.elastic_strain_Z(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="EPELZ", config = config, server = server) + """Read/compute element nodal component elastic strains ZZ normal + component (22 component) by calling the readers defined by the + datasources. Regarding the requested location and the input mesh + scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location, default is nodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.elastic_strain_Z() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> 
my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.elastic_strain_Z( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="EPELZ", config=config, server=server) self._inputs = InputsElasticStrainZ(self) self._outputs = OutputsElasticStrainZ(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component elastic strains ZZ normal component (22 component) by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location, default is Nodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done 
and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component elastic strains ZZ normal + component (22 component) by calling the readers defined by + the datasources. Regarding the requested location and the + input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location, default is nodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "EPELZ") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="EPELZ", server=server) @property def inputs(self): @@ -100,301 +260,305 @@ def inputs(self): Returns -------- - inputs : InputsElasticStrainZ + inputs : InputsElasticStrainZ """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElasticStrainZ + outputs : OutputsElasticStrainZ """ return super().outputs -#internal name: EPELZ -#scripting name: elastic_strain_Z class InputsElasticStrainZ(_Inputs): - """Intermediate class used to connect user inputs to elastic_strain_Z operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.elastic_strain_Z() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + elastic_strain_Z operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain_Z() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(elastic_strain_Z._spec().inputs, op) - self._time_scoping = Input(elastic_strain_Z._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(elastic_strain_Z._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(elastic_strain_Z._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(elastic_strain_Z._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(elastic_strain_Z._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(elastic_strain_Z._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(elastic_strain_Z._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + elastic_strain_Z._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(elastic_strain_Z._spec().input_pin(4), 4, op, -1) 
+ self._data_sources = Input(elastic_strain_Z._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(elastic_strain_Z._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + elastic_strain_Z._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elastic_strain_Z._spec().input_pin(7), 7, op, -1) + self._mesh = Input(elastic_strain_Z._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(elastic_strain_Z._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + elastic_strain_Z._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(elastic_strain_Z._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(elastic_strain_Z._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_Z() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. 
- - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_Z() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_Z() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_Z() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_Z() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_Z() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_Z() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location, default is Nodal + Requested location, default is nodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_Z() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_Z() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsElasticStrainZ(_Outputs): - """Intermediate class used to get outputs from elastic_strain_Z operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.elastic_strain_Z() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + elastic_strain_Z operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain_Z() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(elastic_strain_Z._spec().outputs, op) - self._fields_container = Output(elastic_strain_Z._spec().output_pin(0), 0, op) + self._fields_container = Output(elastic_strain_Z._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_Z() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/elastic_strain_energy_density.py b/ansys/dpf/core/operators/result/elastic_strain_energy_density.py index fe8bb0c1e75..83d7d11a44f 100644 --- a/ansys/dpf/core/operators/result/elastic_strain_energy_density.py +++ b/ansys/dpf/core/operators/result/elastic_strain_energy_density.py @@ -1,98 +1,274 @@ """ elastic_strain_energy_density -============================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class elastic_strain_energy_density(Operator): - """Read/compute element nodal elastic strain energy density by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.elastic_strain_energy_density() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.elastic_strain_energy_density(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, 
data_sources=None, requested_location=None, config=None, server=None): - super().__init__(name="ENL_ELENG", config = config, server = server) + """Read/compute element nodal elastic strain energy density by calling + the readers defined by the datasources. Regarding the requested + location and the input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.elastic_strain_energy_density() + + >>> 
# Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.elastic_strain_energy_density( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="ENL_ELENG", config=config, server=server) self._inputs = InputsElasticStrainEnergyDensity(self) self._outputs = OutputsElasticStrainEnergyDensity(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal elastic strain energy density by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal elastic strain energy density by calling + the readers defined by the datasources. Regarding the + requested location and the input mesh scoping, the result + location can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ENL_ELENG") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ENL_ELENG", server=server) @property def inputs(self): @@ -100,301 +276,347 @@ def inputs(self): Returns -------- - inputs : InputsElasticStrainEnergyDensity + inputs : InputsElasticStrainEnergyDensity """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElasticStrainEnergyDensity + outputs : OutputsElasticStrainEnergyDensity """ return super().outputs -#internal name: ENL_ELENG -#scripting name: elastic_strain_energy_density class InputsElasticStrainEnergyDensity(_Inputs): - """Intermediate class used to connect user inputs to elastic_strain_energy_density operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.elastic_strain_energy_density() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> 
op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + elastic_strain_energy_density operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain_energy_density() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(elastic_strain_energy_density._spec().inputs, op) - self._time_scoping = Input(elastic_strain_energy_density._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + elastic_strain_energy_density._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(elastic_strain_energy_density._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + elastic_strain_energy_density._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = 
Input(elastic_strain_energy_density._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + elastic_strain_energy_density._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(elastic_strain_energy_density._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + elastic_strain_energy_density._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(elastic_strain_energy_density._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + elastic_strain_energy_density._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(elastic_strain_energy_density._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + elastic_strain_energy_density._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elastic_strain_energy_density._spec().input_pin(7), 7, op, -1) + self._mesh = Input( + elastic_strain_energy_density._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(elastic_strain_energy_density._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + elastic_strain_energy_density._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(elastic_strain_energy_density._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + elastic_strain_energy_density._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) + self._read_beams = Input( + elastic_strain_energy_density._spec().input_pin(21), 21, op, -1 + ) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_energy_density() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_energy_density() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_energy_density() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_energy_density() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_energy_density() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_energy_density() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_energy_density() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. 
- - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_energy_density() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_energy_density() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. 
+ + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain_energy_density() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsElasticStrainEnergyDensity(_Outputs): - """Intermediate class used to get outputs from elastic_strain_energy_density operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.elastic_strain_energy_density() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + elastic_strain_energy_density operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain_energy_density() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(elastic_strain_energy_density._spec().outputs, op) - self._fields_container = Output(elastic_strain_energy_density._spec().output_pin(0), 0, op) + self._fields_container = Output( + elastic_strain_energy_density._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_energy_density() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/elastic_strain_principal_1.py b/ansys/dpf/core/operators/result/elastic_strain_principal_1.py index 63c6d768054..78dd4a269f4 100644 --- a/ansys/dpf/core/operators/result/elastic_strain_principal_1.py +++ b/ansys/dpf/core/operators/result/elastic_strain_principal_1.py @@ -1,96 +1,253 @@ """ elastic_strain_principal_1 -========================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class elastic_strain_principal_1(Operator): - """Read/compute element nodal component elastic strains 1st principal component by calling the readers defined by the datasources and computing its eigen values. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.elastic_strain_principal_1() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.elastic_strain_principal_1(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - 
super().__init__(name="EPEL1", config = config, server = server) + """Read/compute element nodal component elastic strains 1st principal + component by calling the readers defined by the datasources and + computing its eigen values. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.elastic_strain_principal_1() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> 
op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.elastic_strain_principal_1( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="EPEL1", config=config, server=server) self._inputs = InputsElasticStrainPrincipal1(self) self._outputs = OutputsElasticStrainPrincipal1(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component elastic strains 1st principal component by calling the readers defined by the datasources and computing its eigen values.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", 
type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document=""""""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component elastic strains 1st principal + component by calling the readers defined by the + datasources and computing its eigen values.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + 
"vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: 
PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "EPEL1") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="EPEL1", server=server) @property def inputs(self): @@ -98,299 +255,315 @@ def inputs(self): Returns -------- - inputs : InputsElasticStrainPrincipal1 + inputs : InputsElasticStrainPrincipal1 """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElasticStrainPrincipal1 + outputs : OutputsElasticStrainPrincipal1 """ return super().outputs -#internal name: EPEL1 -#scripting name: elastic_strain_principal_1 class InputsElasticStrainPrincipal1(_Inputs): - """Intermediate class used to connect user inputs to elastic_strain_principal_1 operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.elastic_strain_principal_1() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> 
op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + elastic_strain_principal_1 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain_principal_1() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(elastic_strain_principal_1._spec().inputs, op) - self._time_scoping = Input(elastic_strain_principal_1._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + elastic_strain_principal_1._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(elastic_strain_principal_1._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + 
elastic_strain_principal_1._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(elastic_strain_principal_1._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + elastic_strain_principal_1._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(elastic_strain_principal_1._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + elastic_strain_principal_1._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(elastic_strain_principal_1._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + elastic_strain_principal_1._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(elastic_strain_principal_1._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + elastic_strain_principal_1._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elastic_strain_principal_1._spec().input_pin(7), 7, op, -1) + self._mesh = Input(elastic_strain_principal_1._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(elastic_strain_principal_1._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + elastic_strain_principal_1._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(elastic_strain_principal_1._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + elastic_strain_principal_1._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_1() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_1() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_1() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_1() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_1() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_1() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_1() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. 
Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_1() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_1() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsElasticStrainPrincipal1(_Outputs): - """Intermediate class used to get outputs from elastic_strain_principal_1 operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.elastic_strain_principal_1() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + elastic_strain_principal_1 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain_principal_1() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(elastic_strain_principal_1._spec().outputs, op) - self._fields_container = Output(elastic_strain_principal_1._spec().output_pin(0), 0, op) + self._fields_container = Output( + elastic_strain_principal_1._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_1() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/elastic_strain_principal_2.py b/ansys/dpf/core/operators/result/elastic_strain_principal_2.py index 1ead3bfb675..5fb4ce8ca18 100644 --- a/ansys/dpf/core/operators/result/elastic_strain_principal_2.py +++ b/ansys/dpf/core/operators/result/elastic_strain_principal_2.py @@ -1,96 +1,253 @@ """ elastic_strain_principal_2 -========================== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class elastic_strain_principal_2(Operator): - """Read/compute element nodal component elastic strains 2nd principal component by calling the readers defined by the datasources and computing its eigen values. - - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.elastic_strain_principal_2() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - 
>>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.elastic_strain_principal_2(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="EPEL2", config = config, server = server) + """Read/compute element nodal component elastic strains 2nd principal + component by calling the readers defined by the datasources and + computing its eigen values. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.elastic_strain_principal_2() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = 
dpf.operators.result.elastic_strain_principal_2( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="EPEL2", config=config, server=server) self._inputs = InputsElasticStrainPrincipal2(self) self._outputs = OutputsElasticStrainPrincipal2(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component elastic strains 2nd principal component by calling the readers defined by the datasources and computing its eigen 
values.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document=""""""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], 
optional=False, document="""""")}) + description = """Read/compute element nodal component elastic strains 2nd principal + component by calling the readers defined by the + datasources and computing its eigen values.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + 
document="""""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "EPEL2") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="EPEL2", server=server) @property def inputs(self): @@ -98,299 +255,315 @@ def inputs(self): Returns -------- - inputs : InputsElasticStrainPrincipal2 + inputs : InputsElasticStrainPrincipal2 """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElasticStrainPrincipal2 + outputs : OutputsElasticStrainPrincipal2 """ return super().outputs -#internal name: EPEL2 -#scripting name: elastic_strain_principal_2 class InputsElasticStrainPrincipal2(_Inputs): - """Intermediate class used to connect user inputs to elastic_strain_principal_2 operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.elastic_strain_principal_2() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + elastic_strain_principal_2 operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain_principal_2() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(elastic_strain_principal_2._spec().inputs, op) - self._time_scoping = Input(elastic_strain_principal_2._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + elastic_strain_principal_2._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(elastic_strain_principal_2._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + elastic_strain_principal_2._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(elastic_strain_principal_2._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + elastic_strain_principal_2._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(elastic_strain_principal_2._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + elastic_strain_principal_2._spec().input_pin(3), 3, op, -1 + ) 
self._inputs.append(self._streams_container) - self._data_sources = Input(elastic_strain_principal_2._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + elastic_strain_principal_2._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(elastic_strain_principal_2._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + elastic_strain_principal_2._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elastic_strain_principal_2._spec().input_pin(7), 7, op, -1) + self._mesh = Input(elastic_strain_principal_2._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(elastic_strain_principal_2._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + elastic_strain_principal_2._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(elastic_strain_principal_2._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + elastic_strain_principal_2._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_2() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_2() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_2() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_2() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_2() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_2() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_2() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. 
Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_2() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_2() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsElasticStrainPrincipal2(_Outputs): - """Intermediate class used to get outputs from elastic_strain_principal_2 operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.elastic_strain_principal_2() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + elastic_strain_principal_2 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain_principal_2() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(elastic_strain_principal_2._spec().outputs, op) - self._fields_container = Output(elastic_strain_principal_2._spec().output_pin(0), 0, op) + self._fields_container = Output( + elastic_strain_principal_2._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_2() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/elastic_strain_principal_3.py b/ansys/dpf/core/operators/result/elastic_strain_principal_3.py index 69cd656037f..f49bc57a45b 100644 --- a/ansys/dpf/core/operators/result/elastic_strain_principal_3.py +++ b/ansys/dpf/core/operators/result/elastic_strain_principal_3.py @@ -1,96 +1,253 @@ """ elastic_strain_principal_3 -========================== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class elastic_strain_principal_3(Operator): - """Read/compute element nodal component elastic strains 3rd principal component by calling the readers defined by the datasources and computing its eigen values. - - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.elastic_strain_principal_3() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - 
>>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.elastic_strain_principal_3(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="EPEL3", config = config, server = server) + """Read/compute element nodal component elastic strains 3rd principal + component by calling the readers defined by the datasources and + computing its eigen values. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.elastic_strain_principal_3() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = 
dpf.operators.result.elastic_strain_principal_3( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="EPEL3", config=config, server=server) self._inputs = InputsElasticStrainPrincipal3(self) self._outputs = OutputsElasticStrainPrincipal3(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component elastic strains 3rd principal component by calling the readers defined by the datasources and computing its eigen 
values.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document=""""""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], 
optional=False, document="""""")}) + description = """Read/compute element nodal component elastic strains 3rd principal + component by calling the readers defined by the + datasources and computing its eigen values.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + 
document="""""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "EPEL3") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="EPEL3", server=server) @property def inputs(self): @@ -98,299 +255,315 @@ def inputs(self): Returns -------- - inputs : InputsElasticStrainPrincipal3 + inputs : InputsElasticStrainPrincipal3 """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElasticStrainPrincipal3 + outputs : OutputsElasticStrainPrincipal3 """ return super().outputs -#internal name: EPEL3 -#scripting name: elastic_strain_principal_3 class InputsElasticStrainPrincipal3(_Inputs): - """Intermediate class used to connect user inputs to elastic_strain_principal_3 operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.elastic_strain_principal_3() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + elastic_strain_principal_3 operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain_principal_3() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(elastic_strain_principal_3._spec().inputs, op) - self._time_scoping = Input(elastic_strain_principal_3._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + elastic_strain_principal_3._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(elastic_strain_principal_3._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + elastic_strain_principal_3._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(elastic_strain_principal_3._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + elastic_strain_principal_3._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(elastic_strain_principal_3._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + elastic_strain_principal_3._spec().input_pin(3), 3, op, -1 + ) 
self._inputs.append(self._streams_container) - self._data_sources = Input(elastic_strain_principal_3._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + elastic_strain_principal_3._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(elastic_strain_principal_3._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + elastic_strain_principal_3._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elastic_strain_principal_3._spec().input_pin(7), 7, op, -1) + self._mesh = Input(elastic_strain_principal_3._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(elastic_strain_principal_3._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + elastic_strain_principal_3._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(elastic_strain_principal_3._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + elastic_strain_principal_3._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_3() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_3() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_3() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_3() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_3() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_3() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_3() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. 
Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_3() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_3() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsElasticStrainPrincipal3(_Outputs): - """Intermediate class used to get outputs from elastic_strain_principal_3 operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.elastic_strain_principal_3() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + elastic_strain_principal_3 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain_principal_3() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(elastic_strain_principal_3._spec().outputs, op) - self._fields_container = Output(elastic_strain_principal_3._spec().output_pin(0), 0, op) + self._fields_container = Output( + elastic_strain_principal_3._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_principal_3() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/elastic_strain_rotation_by_euler_nodes.py b/ansys/dpf/core/operators/result/elastic_strain_rotation_by_euler_nodes.py index 4018c133753..f268cdbc10f 100644 --- a/ansys/dpf/core/operators/result/elastic_strain_rotation_by_euler_nodes.py +++ b/ansys/dpf/core/operators/result/elastic_strain_rotation_by_euler_nodes.py @@ -1,72 +1,133 @@ """ elastic_strain_rotation_by_euler_nodes -====================================== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class elastic_strain_rotation_by_euler_nodes(Operator): - """read Euler angles on elements from the rst file and rotate the fields in the fieldsContainer. - - available inputs: - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.elastic_strain_rotation_by_euler_nodes() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.elastic_strain_rotation_by_euler_nodes(fields_container=my_fields_container,streams_container=my_streams_container,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, streams_container=None, data_sources=None, config=None, server=None): - super().__init__(name="mapdl::rst::EPEL_rotation_by_euler_nodes", config = config, server = server) + """read Euler angles on elements from the rst file and rotate the fields + in the fieldsContainer. 
+ + Parameters + ---------- + fields_container : FieldsContainer, optional + streams_container : StreamsContainer or Stream or Class + Dataprocessing::Crstfilewrapper, optional + data_sources : DataSources + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.elastic_strain_rotation_by_euler_nodes() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.elastic_strain_rotation_by_euler_nodes( + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + fields_container=None, + streams_container=None, + data_sources=None, + config=None, + server=None, + ): + super().__init__( + name="mapdl::rst::EPEL_rotation_by_euler_nodes", + config=config, + server=server, + ) self._inputs = InputsElasticStrainRotationByEulerNodes(self) self._outputs = OutputsElasticStrainRotationByEulerNodes(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) @staticmethod def _spec(): - spec = Specification(description="""read Euler angles on elements from the rst file and rotate the fields in the fieldsContainer.""", - map_input_pin_spec={ - 2 
: PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document=""""""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], optional=True, document=""""""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """read Euler angles on elements from the rst file and rotate the fields + in the fieldsContainer.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""""", + ), + 3: PinSpecification( + name="streams_container", + type_names=[ + "streams_container", + "stream", + "class dataProcessing::CRstFileWrapper", + ], + optional=True, + document="""""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mapdl::rst::EPEL_rotation_by_euler_nodes") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config( + name="mapdl::rst::EPEL_rotation_by_euler_nodes", server=server + ) @property def inputs(self): @@ -74,139 +135,140 @@ def inputs(self): Returns -------- - inputs : InputsElasticStrainRotationByEulerNodes + inputs : InputsElasticStrainRotationByEulerNodes """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElasticStrainRotationByEulerNodes + outputs : OutputsElasticStrainRotationByEulerNodes """ return super().outputs -#internal name: mapdl::rst::EPEL_rotation_by_euler_nodes -#scripting name: elastic_strain_rotation_by_euler_nodes class InputsElasticStrainRotationByEulerNodes(_Inputs): - """Intermediate class used to connect user inputs to elastic_strain_rotation_by_euler_nodes operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.elastic_strain_rotation_by_euler_nodes() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + """Intermediate class used to connect user inputs to + elastic_strain_rotation_by_euler_nodes operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain_rotation_by_euler_nodes() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) """ + def __init__(self, op: Operator): super().__init__(elastic_strain_rotation_by_euler_nodes._spec().inputs, op) - self._fields_container = Input(elastic_strain_rotation_by_euler_nodes._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + elastic_strain_rotation_by_euler_nodes._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(elastic_strain_rotation_by_euler_nodes._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + elastic_strain_rotation_by_euler_nodes._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(elastic_strain_rotation_by_euler_nodes._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + elastic_strain_rotation_by_euler_nodes._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_rotation_by_euler_nodes() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream or Class + Dataprocessing::Crstfilewrapper Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_rotation_by_euler_nodes() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_rotation_by_euler_nodes() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources + class OutputsElasticStrainRotationByEulerNodes(_Outputs): - """Intermediate class used to get outputs from elastic_strain_rotation_by_euler_nodes operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.elastic_strain_rotation_by_euler_nodes() - >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + elastic_strain_rotation_by_euler_nodes operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elastic_strain_rotation_by_euler_nodes() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(elastic_strain_rotation_by_euler_nodes._spec().outputs, op) - self._fields_container = Output(elastic_strain_rotation_by_euler_nodes._spec().output_pin(0), 0, op) + self._fields_container = Output( + elastic_strain_rotation_by_euler_nodes._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elastic_strain_rotation_by_euler_nodes() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/electric_field.py b/ansys/dpf/core/operators/result/electric_field.py index dc389b30540..8d162c9b26b 100644 --- a/ansys/dpf/core/operators/result/electric_field.py +++ b/ansys/dpf/core/operators/result/electric_field.py @@ -1,98 +1,274 @@ """ electric_field -============== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class electric_field(Operator): - """Read/compute electric field by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. - - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.electric_field() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = 
dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.electric_field(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, requested_location=None, config=None, server=None): - super().__init__(name="EF", config = config, server = server) + """Read/compute electric field by calling the readers defined by the + datasources. Regarding the requested location and the input mesh + scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.electric_field() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) 
+ >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.electric_field( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="EF", config=config, server=server) self._inputs = InputsElectricField(self) self._outputs = OutputsElectricField(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not 
None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute electric field by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, 
document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute electric field by calling the readers defined by the + datasources. Regarding the requested location and the + input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "EF") + @staticmethod + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the global server. + """ + return Operator.default_config(name="EF", server=server) @property def inputs(self): @@ -100,301 +276,327 @@ def inputs(self): Returns -------- - inputs : InputsElectricField + inputs : InputsElectricField """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElectricField + outputs : OutputsElectricField """ return super().outputs -#internal name: EF -#scripting name: electric_field class InputsElectricField(_Inputs): - """Intermediate class used to connect user inputs to electric_field operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.electric_field() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> 
my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + electric_field operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.electric_field() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(electric_field._spec().inputs, op) - self._time_scoping = Input(electric_field._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(electric_field._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(electric_field._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(electric_field._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(electric_field._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(electric_field._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(electric_field._spec().input_pin(3), 3, op, 
-1) + self._streams_container = Input(electric_field._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(electric_field._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(electric_field._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(electric_field._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + electric_field._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(electric_field._spec().input_pin(7), 7, op, -1) + self._mesh = Input(electric_field._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(electric_field._spec().input_pin(9), 9, op, -1) + self._requested_location = Input(electric_field._spec().input_pin(9), 9, op, -1) self._inputs.append(self._requested_location) - self._read_cyclic = Input(electric_field._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(electric_field._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) + self._read_beams = Input(electric_field._spec().input_pin(21), 21, op, -1) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.electric_field() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.electric_field() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.electric_field() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.electric_field() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.electric_field() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.electric_field() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.electric_field() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.electric_field() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.electric_field() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. + + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.electric_field() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsElectricField(_Outputs): - """Intermediate class used to get outputs from electric_field operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.electric_field() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + electric_field operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.electric_field() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(electric_field._spec().outputs, op) - self._fields_container = Output(electric_field._spec().output_pin(0), 0, op) + self._fields_container = Output(electric_field._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.electric_field() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/electric_flux_density.py b/ansys/dpf/core/operators/result/electric_flux_density.py new file mode 100644 index 00000000000..f573b8dbd8e --- /dev/null +++ b/ansys/dpf/core/operators/result/electric_flux_density.py @@ -0,0 +1,620 @@ +""" +electric_flux_density +===================== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class electric_flux_density(Operator): + """Read/compute Electric flux density by calling the readers defined by + the datasources. Regarding the requested location and the input + mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.
+ + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.electric_flux_density() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> 
op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.electric_flux_density( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="EFD", config=config, server=server) + self._inputs = InputsElectricFluxDensity(self) + self._outputs = OutputsElectricFluxDensity(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if mesh_scoping is not None: + self.inputs.mesh_scoping.connect(mesh_scoping) + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) + + @staticmethod + def _spec(): + description = """Read/compute Electric flux density by calling the readers defined by + the datasources. 
Regarding the requested location and the + input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: 
PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the global server. + """ + return Operator.default_config(name="EFD", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsElectricFluxDensity + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluating it + + Returns + -------- + outputs : OutputsElectricFluxDensity + """ + return super().outputs + + +class InputsElectricFluxDensity(_Inputs): + """Intermediate class used to connect user inputs to + electric_flux_density operator.
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.electric_flux_density() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + """ + + def __init__(self, op: Operator): + super().__init__(electric_flux_density._spec().inputs, op) + self._time_scoping = Input( + electric_flux_density._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._mesh_scoping = Input( + electric_flux_density._spec().input_pin(1), 1, op, -1 + ) + self._inputs.append(self._mesh_scoping) + self._fields_container = Input( + electric_flux_density._spec().input_pin(2), 2, op, -1 + ) + self._inputs.append(self._fields_container) + self._streams_container = Input( + electric_flux_density._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + electric_flux_density._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._bool_rotate_to_global = Input( + electric_flux_density._spec().input_pin(5), 5, op, -1 + ) + 
self._inputs.append(self._bool_rotate_to_global) + self._mesh = Input(electric_flux_density._spec().input_pin(7), 7, op, -1) + self._inputs.append(self._mesh) + self._requested_location = Input( + electric_flux_density._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._read_cyclic = Input( + electric_flux_density._spec().input_pin(14), 14, op, -1 + ) + self._inputs.append(self._read_cyclic) + self._read_beams = Input( + electric_flux_density._spec().input_pin(21), 21, op, -1 + ) + self._inputs.append(self._read_beams) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.electric_flux_density() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def mesh_scoping(self): + """Allows to connect mesh_scoping input to the operator. + + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + + Parameters + ---------- + my_mesh_scoping : ScopingsContainer or Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.electric_flux_density() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> # or + >>> op.inputs.mesh_scoping(my_mesh_scoping) + """ + return self._mesh_scoping + + @property + def fields_container(self): + """Allows to connect fields_container input to the operator. 
+ + Fields container already allocated modified + inplace + + Parameters + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.electric_flux_density() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> # or + >>> op.inputs.fields_container(my_fields_container) + """ + return self._fields_container + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Result file container allowed to be kept open + to cache data + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.electric_flux_density() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Result file path container, used if no + streams are set + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.electric_flux_density() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def bool_rotate_to_global(self): + """Allows to connect bool_rotate_to_global input to the operator. 
+ + If true the field is rotated to global + coordinate system (default true) + + Parameters + ---------- + my_bool_rotate_to_global : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.electric_flux_density() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> # or + >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) + """ + return self._bool_rotate_to_global + + @property + def mesh(self): + """Allows to connect mesh input to the operator. + + Prevents from reading the mesh in the result + files + + Parameters + ---------- + my_mesh : MeshedRegion or MeshesContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.electric_flux_density() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Requested location nodal, elemental or + elementalnodal + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.electric_flux_density() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def read_cyclic(self): + """Allows to connect read_cyclic input to the operator. 
+ + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + Parameters + ---------- + my_read_cyclic : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.electric_flux_density() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> # or + >>> op.inputs.read_cyclic(my_read_cyclic) + """ + return self._read_cyclic + + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. + + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.electric_flux_density() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + +class OutputsElectricFluxDensity(_Outputs): + """Intermediate class used to get outputs from + electric_flux_density operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.electric_flux_density() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(electric_flux_density._spec().outputs, op) + self._fields_container = Output( + electric_flux_density._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.electric_flux_density() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/electric_potential.py b/ansys/dpf/core/operators/result/electric_potential.py index f8040469050..5f5333bf210 100644 --- a/ansys/dpf/core/operators/result/electric_potential.py +++ b/ansys/dpf/core/operators/result/electric_potential.py @@ -1,92 +1,238 @@ """ electric_potential -================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class electric_potential(Operator): - """Read/compute electric Potential by calling the readers defined by the datasources. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.electric_potential() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.electric_potential(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="VOLT", config = config, server = server) + """Read/compute electric Potential by calling the readers defined by the + datasources. 
+ + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.electric_potential() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> 
op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.electric_potential( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="VOLT", config=config, server=server) self._inputs = InputsElectricPotential(self) self._outputs = OutputsElectricPotential(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute electric Potential by calling the readers 
defined by the datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute electric Potential 
by calling the readers defined by the + datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion 
is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "VOLT") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="VOLT", server=server) @property def inputs(self): @@ -94,275 +240,281 @@ def inputs(self): Returns -------- - inputs : InputsElectricPotential + inputs : InputsElectricPotential """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElectricPotential + outputs : OutputsElectricPotential """ return super().outputs -#internal name: VOLT -#scripting name: electric_potential class InputsElectricPotential(_Inputs): - """Intermediate class used to connect user inputs to electric_potential operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.electric_potential() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = 
dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + electric_potential operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.electric_potential() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(electric_potential._spec().inputs, op) - self._time_scoping = Input(electric_potential._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(electric_potential._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(electric_potential._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(electric_potential._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(electric_potential._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + electric_potential._spec().input_pin(2), 2, op, -1 + 
) self._inputs.append(self._fields_container) - self._streams_container = Input(electric_potential._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + electric_potential._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(electric_potential._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(electric_potential._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(electric_potential._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + electric_potential._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(electric_potential._spec().input_pin(7), 7, op, -1) + self._mesh = Input(electric_potential._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(electric_potential._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(electric_potential._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.electric_potential() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.electric_potential() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.electric_potential() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.electric_potential() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.electric_potential() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.electric_potential() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.electric_potential() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.electric_potential() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsElectricPotential(_Outputs): - """Intermediate class used to get outputs from electric_potential operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.electric_potential() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + electric_potential operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.electric_potential() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(electric_potential._spec().outputs, op) - self._fields_container = Output(electric_potential._spec().output_pin(0), 0, op) + self._fields_container = Output(electric_potential._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.electric_potential() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/element_centroids.py b/ansys/dpf/core/operators/result/element_centroids.py index 4f80901d64e..191385cb950 100644 --- a/ansys/dpf/core/operators/result/element_centroids.py +++ b/ansys/dpf/core/operators/result/element_centroids.py @@ -1,92 +1,238 @@ """ element_centroids -================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class element_centroids(Operator): - """Read/compute coordinate of the elemental centroids by calling the readers defined by the datasources. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.element_centroids() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.element_centroids(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="centroids", config = config, server = server) + """Read/compute coordinate of the elemental centroids by calling the + readers defined by the 
datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.element_centroids() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() 
+ >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.element_centroids( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="centroids", config=config, server=server) self._inputs = InputsElementCentroids(self) self._outputs = OutputsElementCentroids(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute coordinate of the elemental centroids by 
calling the readers defined by the datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute 
coordinate of the elemental centroids by calling the + readers defined by the datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic 
expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "centroids") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="centroids", server=server) @property def inputs(self): @@ -94,275 +240,281 @@ def inputs(self): Returns -------- - inputs : InputsElementCentroids + inputs : InputsElementCentroids """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElementCentroids + outputs : OutputsElementCentroids """ return super().outputs -#internal name: centroids -#scripting name: element_centroids class InputsElementCentroids(_Inputs): - """Intermediate class used to connect user inputs to element_centroids operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.element_centroids() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> 
op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + element_centroids operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.element_centroids() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(element_centroids._spec().inputs, op) - self._time_scoping = Input(element_centroids._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(element_centroids._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(element_centroids._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(element_centroids._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(element_centroids._spec().input_pin(2), 2, op, -1) + 
self._fields_container = Input( + element_centroids._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(element_centroids._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + element_centroids._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(element_centroids._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(element_centroids._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(element_centroids._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + element_centroids._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(element_centroids._spec().input_pin(7), 7, op, -1) + self._mesh = Input(element_centroids._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(element_centroids._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(element_centroids._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_centroids() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_centroids() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_centroids() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_centroids() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_centroids() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_centroids() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_centroids() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_centroids() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsElementCentroids(_Outputs): - """Intermediate class used to get outputs from element_centroids operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.element_centroids() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + element_centroids operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.element_centroids() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(element_centroids._spec().outputs, op) - self._fields_container = Output(element_centroids._spec().output_pin(0), 0, op) + self._fields_container = Output(element_centroids._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_centroids() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/element_nodal_forces.py b/ansys/dpf/core/operators/result/element_nodal_forces.py index d645dae74b8..348111fbba3 100644 --- a/ansys/dpf/core/operators/result/element_nodal_forces.py +++ b/ansys/dpf/core/operators/result/element_nodal_forces.py @@ -1,98 +1,274 @@ """ element_nodal_forces -==================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class element_nodal_forces(Operator): - """Read/compute element nodal forces by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.element_nodal_forces() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.element_nodal_forces(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="ENF", config = config, server = server) + """Read/compute element nodal forces by calling the readers defined by + the datasources. Regarding the requested location and the input + mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.element_nodal_forces() + + >>> # Make input connections + >>> my_time_scoping = 
dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.element_nodal_forces( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="ENF", config=config, server=server) self._inputs = InputsElementNodalForces(self) self._outputs = OutputsElementNodalForces(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal forces by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal forces by calling the readers defined by + the datasources. Regarding the requested location and the + input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ENF") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ENF", server=server) @property def inputs(self): @@ -100,301 +276,337 @@ def inputs(self): Returns -------- - inputs : InputsElementNodalForces + inputs : InputsElementNodalForces """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElementNodalForces + outputs : OutputsElementNodalForces """ return super().outputs -#internal name: ENF -#scripting name: element_nodal_forces class InputsElementNodalForces(_Inputs): - """Intermediate class used to connect user inputs to element_nodal_forces operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.element_nodal_forces() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> 
op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + element_nodal_forces operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.element_nodal_forces() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(element_nodal_forces._spec().inputs, op) - self._time_scoping = Input(element_nodal_forces._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(element_nodal_forces._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(element_nodal_forces._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(element_nodal_forces._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(element_nodal_forces._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + element_nodal_forces._spec().input_pin(2), 2, op, -1 
+ ) self._inputs.append(self._fields_container) - self._streams_container = Input(element_nodal_forces._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + element_nodal_forces._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(element_nodal_forces._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(element_nodal_forces._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(element_nodal_forces._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + element_nodal_forces._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(element_nodal_forces._spec().input_pin(7), 7, op, -1) + self._mesh = Input(element_nodal_forces._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(element_nodal_forces._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + element_nodal_forces._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(element_nodal_forces._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + element_nodal_forces._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) + self._read_beams = Input(element_nodal_forces._spec().input_pin(21), 21, op, -1) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_nodal_forces() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_nodal_forces() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_nodal_forces() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_nodal_forces() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_nodal_forces() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_nodal_forces() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_nodal_forces() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_nodal_forces() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_nodal_forces() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. + + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.element_nodal_forces() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsElementNodalForces(_Outputs): - """Intermediate class used to get outputs from element_nodal_forces operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.element_nodal_forces() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + element_nodal_forces operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.element_nodal_forces() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(element_nodal_forces._spec().outputs, op) - self._fields_container = Output(element_nodal_forces._spec().output_pin(0), 0, op) + self._fields_container = Output( + element_nodal_forces._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_nodal_forces() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/element_orientations.py b/ansys/dpf/core/operators/result/element_orientations.py index 16829af0188..4f0e0a59f4b 100644 --- a/ansys/dpf/core/operators/result/element_orientations.py +++ b/ansys/dpf/core/operators/result/element_orientations.py @@ -1,98 +1,274 @@ """ element_orientations -==================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class element_orientations(Operator): - """Read/compute element output orientations by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.element_orientations() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.element_orientations(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="EUL", config = config, server = server) + """Read/compute element output orientations by calling the readers + defined by the datasources. Regarding the requested location and + the input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.element_orientations() + + >>> # Make input connections + >>> my_time_scoping = 
dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.element_orientations( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="EUL", config=config, server=server) self._inputs = InputsElementOrientations(self) self._outputs = OutputsElementOrientations(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element output orientations by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element output orientations by calling the readers + defined by the datasources. Regarding the requested + location and the input mesh scoping, the result location + can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "EUL") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="EUL", server=server) @property def inputs(self): @@ -100,301 +276,337 @@ def inputs(self): Returns -------- - inputs : InputsElementOrientations + inputs : InputsElementOrientations """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElementOrientations + outputs : OutputsElementOrientations """ return super().outputs -#internal name: EUL -#scripting name: element_orientations class InputsElementOrientations(_Inputs): - """Intermediate class used to connect user inputs to element_orientations operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.element_orientations() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> 
op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + element_orientations operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.element_orientations() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(element_orientations._spec().inputs, op) - self._time_scoping = Input(element_orientations._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(element_orientations._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(element_orientations._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(element_orientations._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(element_orientations._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + element_orientations._spec().input_pin(2), 2, op, -1 
+ ) self._inputs.append(self._fields_container) - self._streams_container = Input(element_orientations._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + element_orientations._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(element_orientations._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(element_orientations._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(element_orientations._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + element_orientations._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(element_orientations._spec().input_pin(7), 7, op, -1) + self._mesh = Input(element_orientations._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(element_orientations._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + element_orientations._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(element_orientations._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + element_orientations._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) + self._read_beams = Input(element_orientations._spec().input_pin(21), 21, op, -1) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_orientations() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_orientations() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_orientations() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_orientations() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_orientations() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_orientations() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_orientations() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_orientations() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_orientations() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. + + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.element_orientations() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsElementOrientations(_Outputs): - """Intermediate class used to get outputs from element_orientations operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.element_orientations() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + element_orientations operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.element_orientations() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(element_orientations._spec().outputs, op) - self._fields_container = Output(element_orientations._spec().output_pin(0), 0, op) + self._fields_container = Output( + element_orientations._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.element_orientations() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/elemental_heat_generation.py b/ansys/dpf/core/operators/result/elemental_heat_generation.py new file mode 100644 index 00000000000..06d76b058a0 --- /dev/null +++ b/ansys/dpf/core/operators/result/elemental_heat_generation.py @@ -0,0 +1,530 @@ +""" +elemental_heat_generation +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class elemental_heat_generation(Operator): + """Read/compute Elemental Heat Generation by calling the readers defined + by the datasources. 
+ + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.elemental_heat_generation() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + 
>>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.elemental_heat_generation( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="EHC", config=config, server=server) + self._inputs = InputsElementalHeatGeneration(self) + self._outputs = OutputsElementalHeatGeneration(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if mesh_scoping is not None: + self.inputs.mesh_scoping.connect(mesh_scoping) + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + + @staticmethod + def _spec(): + description = """Read/compute Elemental Heat Generation by calling the readers defined + by the datasources.""" + spec = Specification( + 
description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + 
map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="EHC", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsElementalHeatGeneration + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsElementalHeatGeneration + """ + return super().outputs + + +class InputsElementalHeatGeneration(_Inputs): + """Intermediate class used to connect user inputs to + elemental_heat_generation operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elemental_heat_generation() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """ + + def __init__(self, op: Operator): + super().__init__(elemental_heat_generation._spec().inputs, op) + self._time_scoping = Input( + elemental_heat_generation._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._mesh_scoping = Input( + elemental_heat_generation._spec().input_pin(1), 1, op, -1 + ) + self._inputs.append(self._mesh_scoping) + self._fields_container = Input( + elemental_heat_generation._spec().input_pin(2), 2, op, -1 + ) + self._inputs.append(self._fields_container) + self._streams_container = Input( + elemental_heat_generation._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + elemental_heat_generation._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._bool_rotate_to_global = Input( + elemental_heat_generation._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._bool_rotate_to_global) + self._mesh = Input(elemental_heat_generation._spec().input_pin(7), 7, op, -1) + self._inputs.append(self._mesh) + 
self._read_cyclic = Input( + elemental_heat_generation._spec().input_pin(14), 14, op, -1 + ) + self._inputs.append(self._read_cyclic) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elemental_heat_generation() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def mesh_scoping(self): + """Allows to connect mesh_scoping input to the operator. + + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + + Parameters + ---------- + my_mesh_scoping : ScopingsContainer or Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elemental_heat_generation() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> # or + >>> op.inputs.mesh_scoping(my_mesh_scoping) + """ + return self._mesh_scoping + + @property + def fields_container(self): + """Allows to connect fields_container input to the operator. 
+ + Fields container already allocated modified + inplace + + Parameters + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elemental_heat_generation() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> # or + >>> op.inputs.fields_container(my_fields_container) + """ + return self._fields_container + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Result file container allowed to be kept open + to cache data + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elemental_heat_generation() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Result file path container, used if no + streams are set + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elemental_heat_generation() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def bool_rotate_to_global(self): + """Allows to connect bool_rotate_to_global input to the operator. 
+ + If true the field is rotated to global + coordinate system (default true) + + Parameters + ---------- + my_bool_rotate_to_global : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elemental_heat_generation() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> # or + >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) + """ + return self._bool_rotate_to_global + + @property + def mesh(self): + """Allows to connect mesh input to the operator. + + Prevents from reading the mesh in the result + files + + Parameters + ---------- + my_mesh : MeshedRegion or MeshesContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elemental_heat_generation() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + @property + def read_cyclic(self): + """Allows to connect read_cyclic input to the operator. + + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + Parameters + ---------- + my_read_cyclic : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elemental_heat_generation() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> # or + >>> op.inputs.read_cyclic(my_read_cyclic) + """ + return self._read_cyclic + + +class OutputsElementalHeatGeneration(_Outputs): + """Intermediate class used to get outputs from + elemental_heat_generation operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elemental_heat_generation() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(elemental_heat_generation._spec().outputs, op) + self._fields_container = Output( + elemental_heat_generation._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elemental_heat_generation() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/elemental_mass.py b/ansys/dpf/core/operators/result/elemental_mass.py index 8df7fffc820..1ea808cd348 100644 --- a/ansys/dpf/core/operators/result/elemental_mass.py +++ b/ansys/dpf/core/operators/result/elemental_mass.py @@ -1,92 +1,238 @@ """ elemental_mass -============== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class elemental_mass(Operator): - """Read/compute element mass by calling the readers defined by the datasources. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.elemental_mass() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.elemental_mass(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="ElementalMass", config = config, server = server) + """Read/compute element mass by calling the readers defined by the + datasources. 
+ + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.elemental_mass() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> 
op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.elemental_mass( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="ElementalMass", config=config, server=server) self._inputs = InputsElementalMass(self) self._outputs = OutputsElementalMass(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element mass by calling the readers defined by the 
datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element mass by calling the 
readers defined by the + datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and 
stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ElementalMass") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ElementalMass", server=server) @property def inputs(self): @@ -94,275 +240,277 @@ def inputs(self): Returns -------- - inputs : InputsElementalMass + inputs : InputsElementalMass """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElementalMass + outputs : OutputsElementalMass """ return super().outputs -#internal name: ElementalMass -#scripting name: elemental_mass class InputsElementalMass(_Inputs): - """Intermediate class used to connect user inputs to elemental_mass operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.elemental_mass() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> 
op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + elemental_mass operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elemental_mass() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(elemental_mass._spec().inputs, op) - self._time_scoping = Input(elemental_mass._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(elemental_mass._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(elemental_mass._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(elemental_mass._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(elemental_mass._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(elemental_mass._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - 
self._streams_container = Input(elemental_mass._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(elemental_mass._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(elemental_mass._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(elemental_mass._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(elemental_mass._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + elemental_mass._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elemental_mass._spec().input_pin(7), 7, op, -1) + self._mesh = Input(elemental_mass._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(elemental_mass._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(elemental_mass._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elemental_mass() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elemental_mass() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elemental_mass() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elemental_mass() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elemental_mass() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elemental_mass() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elemental_mass() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elemental_mass() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsElementalMass(_Outputs): - """Intermediate class used to get outputs from elemental_mass operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.elemental_mass() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + elemental_mass operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elemental_mass() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(elemental_mass._spec().outputs, op) - self._fields_container = Output(elemental_mass._spec().output_pin(0), 0, op) + self._fields_container = Output(elemental_mass._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elemental_mass() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/elemental_volume.py b/ansys/dpf/core/operators/result/elemental_volume.py index 927bdd3877d..a3d7864aad5 100644 --- a/ansys/dpf/core/operators/result/elemental_volume.py +++ b/ansys/dpf/core/operators/result/elemental_volume.py @@ -1,92 +1,238 @@ """ elemental_volume -================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class elemental_volume(Operator): - """Read/compute element volume by calling the readers defined by the datasources. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.elemental_volume() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.elemental_volume(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="ENG_VOL", config = config, server = server) + """Read/compute element volume by calling the readers defined by the + datasources. 
+ + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.elemental_volume() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> 
op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.elemental_volume( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="ENG_VOL", config=config, server=server) self._inputs = InputsElementalVolume(self) self._outputs = OutputsElementalVolume(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element volume by calling the readers defined by 
the datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element volume by calling the 
readers defined by the + datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and 
stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ENG_VOL") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ENG_VOL", server=server) @property def inputs(self): @@ -94,275 +240,279 @@ def inputs(self): Returns -------- - inputs : InputsElementalVolume + inputs : InputsElementalVolume """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElementalVolume + outputs : OutputsElementalVolume """ return super().outputs -#internal name: ENG_VOL -#scripting name: elemental_volume class InputsElementalVolume(_Inputs): - """Intermediate class used to connect user inputs to elemental_volume operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.elemental_volume() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> 
op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + elemental_volume operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elemental_volume() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(elemental_volume._spec().inputs, op) - self._time_scoping = Input(elemental_volume._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(elemental_volume._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(elemental_volume._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(elemental_volume._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(elemental_volume._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(elemental_volume._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - 
self._streams_container = Input(elemental_volume._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + elemental_volume._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(elemental_volume._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(elemental_volume._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(elemental_volume._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + elemental_volume._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(elemental_volume._spec().input_pin(7), 7, op, -1) + self._mesh = Input(elemental_volume._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(elemental_volume._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(elemental_volume._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elemental_volume() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elemental_volume() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elemental_volume() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elemental_volume() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elemental_volume() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elemental_volume() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elemental_volume() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elemental_volume() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsElementalVolume(_Outputs): - """Intermediate class used to get outputs from elemental_volume operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.elemental_volume() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + elemental_volume operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.elemental_volume() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(elemental_volume._spec().outputs, op) - self._fields_container = Output(elemental_volume._spec().output_pin(0), 0, op) + self._fields_container = Output(elemental_volume._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.elemental_volume() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/enf_rotation_by_euler_nodes.py b/ansys/dpf/core/operators/result/enf_rotation_by_euler_nodes.py index dbac31ff036..99a3142df4d 100644 --- a/ansys/dpf/core/operators/result/enf_rotation_by_euler_nodes.py +++ b/ansys/dpf/core/operators/result/enf_rotation_by_euler_nodes.py @@ -1,72 +1,131 @@ """ enf_rotation_by_euler_nodes -=========================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class enf_rotation_by_euler_nodes(Operator): - """read Euler angles on elements from the rst file and rotate the fields in the fieldsContainer. 
- - available inputs: - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.enf_rotation_by_euler_nodes() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.enf_rotation_by_euler_nodes(fields_container=my_fields_container,streams_container=my_streams_container,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, streams_container=None, data_sources=None, config=None, server=None): - super().__init__(name="ENF_rotation_by_euler_nodes", config = config, server = server) + """read Euler angles on elements from the rst file and rotate the fields + in the fieldsContainer. 
+ + Parameters + ---------- + fields_container : FieldsContainer, optional + streams_container : StreamsContainer or Stream or Class + Dataprocessing::Crstfilewrapper, optional + data_sources : DataSources + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.enf_rotation_by_euler_nodes() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.enf_rotation_by_euler_nodes( + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + fields_container=None, + streams_container=None, + data_sources=None, + config=None, + server=None, + ): + super().__init__( + name="ENF_rotation_by_euler_nodes", config=config, server=server + ) self._inputs = InputsEnfRotationByEulerNodes(self) self._outputs = OutputsEnfRotationByEulerNodes(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) @staticmethod def _spec(): - spec = Specification(description="""read Euler angles on elements from the rst file and rotate the fields in the fieldsContainer.""", - map_input_pin_spec={ - 2 : PinSpecification(name = "fields_container", 
type_names=["fields_container"], optional=True, document=""""""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], optional=True, document=""""""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """read Euler angles on elements from the rst file and rotate the fields + in the fieldsContainer.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""""", + ), + 3: PinSpecification( + name="streams_container", + type_names=[ + "streams_container", + "stream", + "class dataProcessing::CRstFileWrapper", + ], + optional=True, + document="""""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ENF_rotation_by_euler_nodes") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config( + name="ENF_rotation_by_euler_nodes", server=server + ) @property def inputs(self): @@ -74,139 +133,140 @@ def inputs(self): Returns -------- - inputs : InputsEnfRotationByEulerNodes + inputs : InputsEnfRotationByEulerNodes """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsEnfRotationByEulerNodes + outputs : OutputsEnfRotationByEulerNodes """ return super().outputs -#internal name: ENF_rotation_by_euler_nodes -#scripting name: enf_rotation_by_euler_nodes class InputsEnfRotationByEulerNodes(_Inputs): - """Intermediate class used to connect user inputs to enf_rotation_by_euler_nodes operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.enf_rotation_by_euler_nodes() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + """Intermediate class used to connect user inputs to + enf_rotation_by_euler_nodes operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.enf_rotation_by_euler_nodes() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) """ + def __init__(self, op: Operator): super().__init__(enf_rotation_by_euler_nodes._spec().inputs, op) - self._fields_container = Input(enf_rotation_by_euler_nodes._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + enf_rotation_by_euler_nodes._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(enf_rotation_by_euler_nodes._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + enf_rotation_by_euler_nodes._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(enf_rotation_by_euler_nodes._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + enf_rotation_by_euler_nodes._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.enf_rotation_by_euler_nodes() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream or Class + Dataprocessing::Crstfilewrapper Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.enf_rotation_by_euler_nodes() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.enf_rotation_by_euler_nodes() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources + class OutputsEnfRotationByEulerNodes(_Outputs): - """Intermediate class used to get outputs from enf_rotation_by_euler_nodes operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.enf_rotation_by_euler_nodes() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + enf_rotation_by_euler_nodes operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.enf_rotation_by_euler_nodes() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(enf_rotation_by_euler_nodes._spec().outputs, op) - self._fields_container = Output(enf_rotation_by_euler_nodes._spec().output_pin(0), 0, op) + self._fields_container = Output( + enf_rotation_by_euler_nodes._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.enf_rotation_by_euler_nodes() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/equivalent_mass.py b/ansys/dpf/core/operators/result/equivalent_mass.py new file mode 100644 index 00000000000..8db944dbdc6 --- /dev/null +++ b/ansys/dpf/core/operators/result/equivalent_mass.py @@ -0,0 +1,516 @@ +""" +equivalent_mass +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class equivalent_mass(Operator): + """Read/compute equivalent dof mass by calling the readers defined by the + datasources. 
+ + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.equivalent_mass() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> 
op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.equivalent_mass( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="equivalent_mass", config=config, server=server) + self._inputs = InputsEquivalentMass(self) + self._outputs = OutputsEquivalentMass(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if mesh_scoping is not None: + self.inputs.mesh_scoping.connect(mesh_scoping) + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + + @staticmethod + def _spec(): + description = """Read/compute equivalent dof mass by calling the readers defined by the + datasources.""" + spec = Specification( + description=description, 
+ map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: 
PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="equivalent_mass", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsEquivalentMass + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsEquivalentMass + """ + return super().outputs + + +class InputsEquivalentMass(_Inputs): + """Intermediate class used to connect user inputs to + equivalent_mass operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.equivalent_mass() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """ + + def __init__(self, op: Operator): + super().__init__(equivalent_mass._spec().inputs, op) + self._time_scoping = Input(equivalent_mass._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._time_scoping) + self._mesh_scoping = Input(equivalent_mass._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._mesh_scoping) + self._fields_container = Input(equivalent_mass._spec().input_pin(2), 2, op, -1) + self._inputs.append(self._fields_container) + self._streams_container = Input(equivalent_mass._spec().input_pin(3), 3, op, -1) + self._inputs.append(self._streams_container) + self._data_sources = Input(equivalent_mass._spec().input_pin(4), 4, op, -1) + self._inputs.append(self._data_sources) + self._bool_rotate_to_global = Input( + equivalent_mass._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._bool_rotate_to_global) + self._mesh = Input(equivalent_mass._spec().input_pin(7), 7, op, -1) + self._inputs.append(self._mesh) + self._read_cyclic = Input(equivalent_mass._spec().input_pin(14), 14, op, -1) + self._inputs.append(self._read_cyclic) + + @property + 
def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.equivalent_mass() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def mesh_scoping(self): + """Allows to connect mesh_scoping input to the operator. + + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + + Parameters + ---------- + my_mesh_scoping : ScopingsContainer or Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.equivalent_mass() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> # or + >>> op.inputs.mesh_scoping(my_mesh_scoping) + """ + return self._mesh_scoping + + @property + def fields_container(self): + """Allows to connect fields_container input to the operator. + + Fields container already allocated modified + inplace + + Parameters + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.equivalent_mass() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> # or + >>> op.inputs.fields_container(my_fields_container) + """ + return self._fields_container + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. 
+ + Result file container allowed to be kept open + to cache data + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.equivalent_mass() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Result file path container, used if no + streams are set + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.equivalent_mass() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def bool_rotate_to_global(self): + """Allows to connect bool_rotate_to_global input to the operator. + + If true the field is rotated to global + coordinate system (default true) + + Parameters + ---------- + my_bool_rotate_to_global : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.equivalent_mass() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> # or + >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) + """ + return self._bool_rotate_to_global + + @property + def mesh(self): + """Allows to connect mesh input to the operator. + + Prevents from reading the mesh in the result + files + + Parameters + ---------- + my_mesh : MeshedRegion or MeshesContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.equivalent_mass() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + @property + def read_cyclic(self): + """Allows to connect read_cyclic input to the operator. 
+ + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + Parameters + ---------- + my_read_cyclic : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.equivalent_mass() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> # or + >>> op.inputs.read_cyclic(my_read_cyclic) + """ + return self._read_cyclic + + +class OutputsEquivalentMass(_Outputs): + """Intermediate class used to get outputs from + equivalent_mass operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.equivalent_mass() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(equivalent_mass._spec().outputs, op) + self._fields_container = Output(equivalent_mass._spec().output_pin(0), 0, op) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.equivalent_mass() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/equivalent_radiated_power.py b/ansys/dpf/core/operators/result/equivalent_radiated_power.py index cddfab2b2ce..fccb584ab44 100644 --- a/ansys/dpf/core/operators/result/equivalent_radiated_power.py +++ b/ansys/dpf/core/operators/result/equivalent_radiated_power.py @@ -1,72 +1,128 @@ """ equivalent_radiated_power -========================= +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "result" category -""" class equivalent_radiated_power(Operator): """Compute the Equivalent Radiated Power (ERP) - available inputs: - - fields_container (FieldsContainer) - - meshed_region (MeshedRegion, MeshesContainer) (optional) - - time_scoping (int, list, Scoping) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.equivalent_radiated_power() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_meshed_region = dpf.MeshedRegion() - >>> op.inputs.meshed_region.connect(my_meshed_region) - >>> my_time_scoping = int() - >>> op.inputs.time_scoping.connect(my_time_scoping) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.equivalent_radiated_power(fields_container=my_fields_container,meshed_region=my_meshed_region,time_scoping=my_time_scoping) + Parameters + ---------- + fields_container : FieldsContainer + abstract_meshed_region : MeshedRegion or MeshesContainer, optional + The mesh region in this pin have to be + boundary or skin mesh + time_scoping : int or Scoping, optional + Load step number (if it's specified, the erp + is computed only on the substeps of + this step) or time scoping + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.equivalent_radiated_power() + + >>> # Make input connections + >>> 
my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_time_scoping = int() + >>> op.inputs.time_scoping.connect(my_time_scoping) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.equivalent_radiated_power( + ... fields_container=my_fields_container, + ... abstract_meshed_region=my_abstract_meshed_region, + ... time_scoping=my_time_scoping, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, meshed_region=None, time_scoping=None, config=None, server=None): - super().__init__(name="ERP", config = config, server = server) + def __init__( + self, + fields_container=None, + abstract_meshed_region=None, + time_scoping=None, + config=None, + server=None, + ): + super().__init__(name="ERP", config=config, server=server) self._inputs = InputsEquivalentRadiatedPower(self) self._outputs = OutputsEquivalentRadiatedPower(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if meshed_region !=None: - self.inputs.meshed_region.connect(meshed_region) - if time_scoping !=None: + if abstract_meshed_region is not None: + self.inputs.abstract_meshed_region.connect(abstract_meshed_region) + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) @staticmethod def _spec(): - spec = Specification(description="""Compute the Equivalent Radiated Power (ERP)""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "meshed_region", 
type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""the mesh region in this pin have to be boundary or skin mesh"""), - 2 : PinSpecification(name = "time_scoping", type_names=["int32","vector","scoping"], optional=True, document="""load step number (if it's specified, the ERP is computed only on the substeps of this step) or time scoping""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Compute the Equivalent Radiated Power (ERP)""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="abstract_meshed_region", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""The mesh region in this pin have to be + boundary or skin mesh""", + ), + 2: PinSpecification( + name="time_scoping", + type_names=["int32", "vector", "scoping"], + optional=True, + document="""Load step number (if it's specified, the erp + is computed only on the substeps of + this step) or time scoping""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ERP") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="ERP", server=server) @property def inputs(self): @@ -74,143 +130,146 @@ def inputs(self): Returns -------- - inputs : InputsEquivalentRadiatedPower + inputs : InputsEquivalentRadiatedPower """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsEquivalentRadiatedPower + outputs : OutputsEquivalentRadiatedPower """ return super().outputs -#internal name: ERP -#scripting name: equivalent_radiated_power class InputsEquivalentRadiatedPower(_Inputs): - """Intermediate class used to connect user inputs to equivalent_radiated_power operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.equivalent_radiated_power() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_meshed_region = dpf.MeshedRegion() - >>> op.inputs.meshed_region.connect(my_meshed_region) - >>> my_time_scoping = int() - >>> op.inputs.time_scoping.connect(my_time_scoping) + """Intermediate class used to connect user inputs to + equivalent_radiated_power operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.equivalent_radiated_power() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_time_scoping = int() + >>> op.inputs.time_scoping.connect(my_time_scoping) """ + def __init__(self, op: Operator): super().__init__(equivalent_radiated_power._spec().inputs, op) - self._fields_container = Input(equivalent_radiated_power._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + equivalent_radiated_power._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._meshed_region = Input(equivalent_radiated_power._spec().input_pin(1), 1, op, -1) - self._inputs.append(self._meshed_region) - self._time_scoping = Input(equivalent_radiated_power._spec().input_pin(2), 2, op, -1) + self._abstract_meshed_region = Input( + equivalent_radiated_power._spec().input_pin(1), 1, op, -1 + ) + self._inputs.append(self._abstract_meshed_region) + self._time_scoping = Input( + equivalent_radiated_power._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._time_scoping) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.equivalent_radiated_power() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property - def meshed_region(self): - """Allows to connect meshed_region input to the operator + def abstract_meshed_region(self): + """Allows to connect abstract_meshed_region input to the operator. - - pindoc: the mesh region in this pin have to be boundary or skin mesh + The mesh region in this pin have to be + boundary or skin mesh Parameters ---------- - my_meshed_region : MeshedRegion, MeshesContainer, + my_abstract_meshed_region : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.equivalent_radiated_power() - >>> op.inputs.meshed_region.connect(my_meshed_region) - >>> #or - >>> op.inputs.meshed_region(my_meshed_region) - + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> # or + >>> op.inputs.abstract_meshed_region(my_abstract_meshed_region) """ - return self._meshed_region + return self._abstract_meshed_region @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: load step number (if it's specified, the ERP is computed only on the substeps of this step) or time scoping + Load step number (if it's specified, the erp + is computed only on the substeps of + this step) or time scoping Parameters ---------- - my_time_scoping : int, list, Scoping, + my_time_scoping : int or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.equivalent_radiated_power() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping + class OutputsEquivalentRadiatedPower(_Outputs): - """Intermediate class used to get outputs from equivalent_radiated_power operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.equivalent_radiated_power() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + equivalent_radiated_power operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.equivalent_radiated_power() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(equivalent_radiated_power._spec().outputs, op) - self._fields_container = Output(equivalent_radiated_power._spec().output_pin(0), 0, op) + self._fields_container = Output( + equivalent_radiated_power._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.equivalent_radiated_power() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/eqv_stress_parameter.py b/ansys/dpf/core/operators/result/eqv_stress_parameter.py index da85d37e1b0..d7e8c3856cd 100644 --- a/ansys/dpf/core/operators/result/eqv_stress_parameter.py +++ b/ansys/dpf/core/operators/result/eqv_stress_parameter.py @@ -1,98 +1,274 @@ """ eqv_stress_parameter -==================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class eqv_stress_parameter(Operator): - """Read/compute element nodal equivalent stress parameter by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.eqv_stress_parameter() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.eqv_stress_parameter(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="ENL_SEPL", config = config, server = server) + """Read/compute element nodal equivalent stress parameter by calling the + readers defined by the datasources. Regarding the requested + location and the input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.eqv_stress_parameter() + + >>> # Make input connections + >>> 
my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.eqv_stress_parameter( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="ENL_SEPL", config=config, server=server) self._inputs = InputsEqvStressParameter(self) self._outputs = OutputsEqvStressParameter(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal equivalent stress parameter by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal equivalent stress parameter by calling the + readers defined by the datasources. Regarding the + requested location and the input mesh scoping, the result + location can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ENL_SEPL") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ENL_SEPL", server=server) @property def inputs(self): @@ -100,301 +276,337 @@ def inputs(self): Returns -------- - inputs : InputsEqvStressParameter + inputs : InputsEqvStressParameter """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsEqvStressParameter + outputs : OutputsEqvStressParameter """ return super().outputs -#internal name: ENL_SEPL -#scripting name: eqv_stress_parameter class InputsEqvStressParameter(_Inputs): - """Intermediate class used to connect user inputs to eqv_stress_parameter operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.eqv_stress_parameter() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> 
op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + eqv_stress_parameter operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.eqv_stress_parameter() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(eqv_stress_parameter._spec().inputs, op) - self._time_scoping = Input(eqv_stress_parameter._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(eqv_stress_parameter._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(eqv_stress_parameter._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(eqv_stress_parameter._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(eqv_stress_parameter._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + eqv_stress_parameter._spec().input_pin(2), 2, op, -1 
+ ) self._inputs.append(self._fields_container) - self._streams_container = Input(eqv_stress_parameter._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + eqv_stress_parameter._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(eqv_stress_parameter._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(eqv_stress_parameter._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(eqv_stress_parameter._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + eqv_stress_parameter._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(eqv_stress_parameter._spec().input_pin(7), 7, op, -1) + self._mesh = Input(eqv_stress_parameter._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(eqv_stress_parameter._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + eqv_stress_parameter._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(eqv_stress_parameter._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + eqv_stress_parameter._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) + self._read_beams = Input(eqv_stress_parameter._spec().input_pin(21), 21, op, -1) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.eqv_stress_parameter() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.eqv_stress_parameter() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.eqv_stress_parameter() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.eqv_stress_parameter() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.eqv_stress_parameter() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.eqv_stress_parameter() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.eqv_stress_parameter() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.eqv_stress_parameter() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.eqv_stress_parameter() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. + + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.eqv_stress_parameter() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsEqvStressParameter(_Outputs): - """Intermediate class used to get outputs from eqv_stress_parameter operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.eqv_stress_parameter() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + eqv_stress_parameter operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.eqv_stress_parameter() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(eqv_stress_parameter._spec().outputs, op) - self._fields_container = Output(eqv_stress_parameter._spec().output_pin(0), 0, op) + self._fields_container = Output( + eqv_stress_parameter._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.eqv_stress_parameter() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/euler_load_buckling.py b/ansys/dpf/core/operators/result/euler_load_buckling.py new file mode 100644 index 00000000000..a0f622d0d96 --- /dev/null +++ b/ansys/dpf/core/operators/result/euler_load_buckling.py @@ -0,0 +1,386 @@ +""" +euler_load_buckling +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class euler_load_buckling(Operator): + """Computing Euler's Critical Load. Formula: Ncr = n*E*I*pi*pi /(L*L) + + Parameters + ---------- + field_beam_end_condition : DataSources or Field + this pin contains file csv or field of + beam's end condition added by the + user. if there's no file added, it + would take value of all beam's end + condition as 1. 
+ field_beam_moment_inertia : Field + Field of beam's moment inertia + field_beam_young_modulus : Field + Field of beam's young modulus + field_beam_length : Field + Field of beam's length + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.euler_load_buckling() + + >>> # Make input connections + >>> my_field_beam_end_condition = dpf.DataSources() + >>> op.inputs.field_beam_end_condition.connect(my_field_beam_end_condition) + >>> my_field_beam_moment_inertia = dpf.Field() + >>> op.inputs.field_beam_moment_inertia.connect(my_field_beam_moment_inertia) + >>> my_field_beam_young_modulus = dpf.Field() + >>> op.inputs.field_beam_young_modulus.connect(my_field_beam_young_modulus) + >>> my_field_beam_length = dpf.Field() + >>> op.inputs.field_beam_length.connect(my_field_beam_length) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.euler_load_buckling( + ... field_beam_end_condition=my_field_beam_end_condition, + ... field_beam_moment_inertia=my_field_beam_moment_inertia, + ... field_beam_young_modulus=my_field_beam_young_modulus, + ... field_beam_length=my_field_beam_length, + ... 
) + + >>> # Get output data + >>> result_field_euler_critical_load = op.outputs.field_euler_critical_load() + >>> result_field_euler_critical_load_yy = op.outputs.field_euler_critical_load_yy() + >>> result_field_euler_critical_load_zz = op.outputs.field_euler_critical_load_zz() + """ + + def __init__( + self, + field_beam_end_condition=None, + field_beam_moment_inertia=None, + field_beam_young_modulus=None, + field_beam_length=None, + config=None, + server=None, + ): + super().__init__(name="euler_load_buckling", config=config, server=server) + self._inputs = InputsEulerLoadBuckling(self) + self._outputs = OutputsEulerLoadBuckling(self) + if field_beam_end_condition is not None: + self.inputs.field_beam_end_condition.connect(field_beam_end_condition) + if field_beam_moment_inertia is not None: + self.inputs.field_beam_moment_inertia.connect(field_beam_moment_inertia) + if field_beam_young_modulus is not None: + self.inputs.field_beam_young_modulus.connect(field_beam_young_modulus) + if field_beam_length is not None: + self.inputs.field_beam_length.connect(field_beam_length) + + @staticmethod + def _spec(): + description = ( + """Computing Euler's Critical Load. Formula: Ncr = n*E*I*pi*pi /(L*L)""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 5: PinSpecification( + name="field_beam_end_condition", + type_names=["data_sources", "field"], + optional=False, + document=""" this pin contains file csv or field of + beam's end condition added by the + user. 
if there's no file added, it + would take value of all beam's end + condition as 1.""", + ), + 6: PinSpecification( + name="field_beam_moment_inertia", + type_names=["field"], + optional=False, + document="""Field of beam's moment inertia""", + ), + 7: PinSpecification( + name="field_beam_young_modulus", + type_names=["field"], + optional=False, + document="""Field of beam's young modulus""", + ), + 8: PinSpecification( + name="field_beam_length", + type_names=["field"], + optional=False, + document="""Field of beam's length""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field_euler_critical_load", + type_names=["field"], + optional=False, + document="""This field contains euler's critical load + about the principle axis of the cross + section having the least moment of + inertia.""", + ), + 1: PinSpecification( + name="field_euler_critical_load_yy", + type_names=["field"], + optional=False, + document="""This field contains euler's critical load on + axis y.""", + ), + 2: PinSpecification( + name="field_euler_critical_load_zz", + type_names=["field"], + optional=False, + document="""This field contains euler's critical load on + axis z.""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="euler_load_buckling", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsEulerLoadBuckling + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsEulerLoadBuckling + """ + return super().outputs + + +class InputsEulerLoadBuckling(_Inputs): + """Intermediate class used to connect user inputs to + euler_load_buckling operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.euler_load_buckling() + >>> my_field_beam_end_condition = dpf.DataSources() + >>> op.inputs.field_beam_end_condition.connect(my_field_beam_end_condition) + >>> my_field_beam_moment_inertia = dpf.Field() + >>> op.inputs.field_beam_moment_inertia.connect(my_field_beam_moment_inertia) + >>> my_field_beam_young_modulus = dpf.Field() + >>> op.inputs.field_beam_young_modulus.connect(my_field_beam_young_modulus) + >>> my_field_beam_length = dpf.Field() + >>> op.inputs.field_beam_length.connect(my_field_beam_length) + """ + + def __init__(self, op: Operator): + super().__init__(euler_load_buckling._spec().inputs, op) + self._field_beam_end_condition = Input( + euler_load_buckling._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._field_beam_end_condition) + self._field_beam_moment_inertia = Input( + euler_load_buckling._spec().input_pin(6), 6, op, -1 + ) + self._inputs.append(self._field_beam_moment_inertia) + self._field_beam_young_modulus = Input( + euler_load_buckling._spec().input_pin(7), 7, op, -1 + ) + self._inputs.append(self._field_beam_young_modulus) + self._field_beam_length = Input( + euler_load_buckling._spec().input_pin(8), 8, op, -1 + ) + self._inputs.append(self._field_beam_length) + + @property + def field_beam_end_condition(self): + """Allows to connect field_beam_end_condition input to 
the operator. + + this pin contains file csv or field of + beam's end condition added by the + user. if there's no file added, it + would take value of all beam's end + condition as 1. + + Parameters + ---------- + my_field_beam_end_condition : DataSources or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.euler_load_buckling() + >>> op.inputs.field_beam_end_condition.connect(my_field_beam_end_condition) + >>> # or + >>> op.inputs.field_beam_end_condition(my_field_beam_end_condition) + """ + return self._field_beam_end_condition + + @property + def field_beam_moment_inertia(self): + """Allows to connect field_beam_moment_inertia input to the operator. + + Field of beam's moment inertia + + Parameters + ---------- + my_field_beam_moment_inertia : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.euler_load_buckling() + >>> op.inputs.field_beam_moment_inertia.connect(my_field_beam_moment_inertia) + >>> # or + >>> op.inputs.field_beam_moment_inertia(my_field_beam_moment_inertia) + """ + return self._field_beam_moment_inertia + + @property + def field_beam_young_modulus(self): + """Allows to connect field_beam_young_modulus input to the operator. + + Field of beam's young modulus + + Parameters + ---------- + my_field_beam_young_modulus : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.euler_load_buckling() + >>> op.inputs.field_beam_young_modulus.connect(my_field_beam_young_modulus) + >>> # or + >>> op.inputs.field_beam_young_modulus(my_field_beam_young_modulus) + """ + return self._field_beam_young_modulus + + @property + def field_beam_length(self): + """Allows to connect field_beam_length input to the operator. 
+ + Field of beam's length + + Parameters + ---------- + my_field_beam_length : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.euler_load_buckling() + >>> op.inputs.field_beam_length.connect(my_field_beam_length) + >>> # or + >>> op.inputs.field_beam_length(my_field_beam_length) + """ + return self._field_beam_length + + +class OutputsEulerLoadBuckling(_Outputs): + """Intermediate class used to get outputs from + euler_load_buckling operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.euler_load_buckling() + >>> # Connect inputs : op.inputs. ... + >>> result_field_euler_critical_load = op.outputs.field_euler_critical_load() + >>> result_field_euler_critical_load_yy = op.outputs.field_euler_critical_load_yy() + >>> result_field_euler_critical_load_zz = op.outputs.field_euler_critical_load_zz() + """ + + def __init__(self, op: Operator): + super().__init__(euler_load_buckling._spec().outputs, op) + self._field_euler_critical_load = Output( + euler_load_buckling._spec().output_pin(0), 0, op + ) + self._outputs.append(self._field_euler_critical_load) + self._field_euler_critical_load_yy = Output( + euler_load_buckling._spec().output_pin(1), 1, op + ) + self._outputs.append(self._field_euler_critical_load_yy) + self._field_euler_critical_load_zz = Output( + euler_load_buckling._spec().output_pin(2), 2, op + ) + self._outputs.append(self._field_euler_critical_load_zz) + + @property + def field_euler_critical_load(self): + """Allows to get field_euler_critical_load output of the operator + + Returns + ---------- + my_field_euler_critical_load : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.euler_load_buckling() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field_euler_critical_load = op.outputs.field_euler_critical_load() + """ # noqa: E501 + return self._field_euler_critical_load + + @property + def field_euler_critical_load_yy(self): + """Allows to get field_euler_critical_load_yy output of the operator + + Returns + ---------- + my_field_euler_critical_load_yy : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.euler_load_buckling() + >>> # Connect inputs : op.inputs. ... + >>> result_field_euler_critical_load_yy = op.outputs.field_euler_critical_load_yy() + """ # noqa: E501 + return self._field_euler_critical_load_yy + + @property + def field_euler_critical_load_zz(self): + """Allows to get field_euler_critical_load_zz output of the operator + + Returns + ---------- + my_field_euler_critical_load_zz : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.euler_load_buckling() + >>> # Connect inputs : op.inputs. ... + >>> result_field_euler_critical_load_zz = op.outputs.field_euler_critical_load_zz() + """ # noqa: E501 + return self._field_euler_critical_load_zz diff --git a/ansys/dpf/core/operators/result/euler_nodes.py b/ansys/dpf/core/operators/result/euler_nodes.py index f04580a72cd..51eece26f2c 100644 --- a/ansys/dpf/core/operators/result/euler_nodes.py +++ b/ansys/dpf/core/operators/result/euler_nodes.py @@ -1,78 +1,151 @@ """ euler_nodes -=========== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class euler_nodes(Operator): - """read a field made of 3 coordinates and 3 Euler angles (6 dofs) by node from the rst file. - - available inputs: - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - coord_and_euler (bool) - - mesh (MeshedRegion) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.euler_nodes() - - >>> # Make input connections - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_coord_and_euler = bool() - >>> op.inputs.coord_and_euler.connect(my_coord_and_euler) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.euler_nodes(streams_container=my_streams_container,data_sources=my_data_sources,coord_and_euler=my_coord_and_euler,mesh=my_mesh) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, streams_container=None, data_sources=None, coord_and_euler=None, mesh=None, config=None, server=None): - super().__init__(name="mapdl::rst::coords_and_euler_nodes", config = config, server = server) + """read a field made of 3 coordinates and 3 Euler angles (6 dofs) by node + from the rst file. 
+ + Parameters + ---------- + streams_container : StreamsContainer or Stream or Class + Dataprocessing::Crstfilewrapper, optional + data_sources : DataSources + coord_and_euler : bool + If true, then the field has ncomp=6 with 3 + oords and 3 euler angles, else there + is only the euler angles (default is + true) + mesh : MeshedRegion, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.euler_nodes() + + >>> # Make input connections + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_coord_and_euler = bool() + >>> op.inputs.coord_and_euler.connect(my_coord_and_euler) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.euler_nodes( + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... coord_and_euler=my_coord_and_euler, + ... mesh=my_mesh, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + streams_container=None, + data_sources=None, + coord_and_euler=None, + mesh=None, + config=None, + server=None, + ): + super().__init__( + name="mapdl::rst::coords_and_euler_nodes", config=config, server=server + ) self._inputs = InputsEulerNodes(self) self._outputs = OutputsEulerNodes(self) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if coord_and_euler !=None: + if coord_and_euler is not None: self.inputs.coord_and_euler.connect(coord_and_euler) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""read a field made of 3 coordinates and 3 Euler angles (6 dofs) by node from the rst file.""", - map_input_pin_spec={ - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], optional=True, document=""""""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document=""""""), - 6 : PinSpecification(name = "coord_and_euler", type_names=["bool"], optional=False, document="""if true, then the field has ncomp=6 with 3 oords and 3 euler angles, else there is only the euler angles (default is true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """read a field made of 3 coordinates and 3 Euler angles (6 dofs) by node + from the rst file.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 3: PinSpecification( + name="streams_container", + type_names=[ + 
"streams_container", + "stream", + "class dataProcessing::CRstFileWrapper", + ], + optional=True, + document="""""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""""", + ), + 6: PinSpecification( + name="coord_and_euler", + type_names=["bool"], + optional=False, + document="""If true, then the field has ncomp=6 with 3 + oords and 3 euler angles, else there + is only the euler angles (default is + true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mapdl::rst::coords_and_euler_nodes") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config( + name="mapdl::rst::coords_and_euler_nodes", server=server + ) @property def inputs(self): @@ -80,165 +153,159 @@ def inputs(self): Returns -------- - inputs : InputsEulerNodes + inputs : InputsEulerNodes """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsEulerNodes + outputs : OutputsEulerNodes """ return super().outputs -#internal name: mapdl::rst::coords_and_euler_nodes -#scripting name: euler_nodes class InputsEulerNodes(_Inputs): - """Intermediate class used to connect user inputs to euler_nodes operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.euler_nodes() - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_coord_and_euler = bool() - >>> op.inputs.coord_and_euler.connect(my_coord_and_euler) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + euler_nodes operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.euler_nodes() + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_coord_and_euler = bool() + >>> op.inputs.coord_and_euler.connect(my_coord_and_euler) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(euler_nodes._spec().inputs, op) - self._streams_container = Input(euler_nodes._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(euler_nodes._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(euler_nodes._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(euler_nodes._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._coord_and_euler = Input(euler_nodes._spec().input_pin(6), 6, op, -1) + self._coord_and_euler = Input(euler_nodes._spec().input_pin(6), 6, op, -1) self._inputs.append(self._coord_and_euler) - self._mesh = Input(euler_nodes._spec().input_pin(7), 7, op, -1) + self._mesh = Input(euler_nodes._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream or Class + Dataprocessing::Crstfilewrapper Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.euler_nodes() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.euler_nodes() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def coord_and_euler(self): - """Allows to connect coord_and_euler input to the operator + """Allows to connect coord_and_euler input to the operator. - - pindoc: if true, then the field has ncomp=6 with 3 oords and 3 euler angles, else there is only the euler angles (default is true) + If true, then the field has ncomp=6 with 3 + oords and 3 euler angles, else there + is only the euler angles (default is + true) Parameters ---------- - my_coord_and_euler : bool, + my_coord_and_euler : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.euler_nodes() >>> op.inputs.coord_and_euler.connect(my_coord_and_euler) - >>> #or + >>> # or >>> op.inputs.coord_and_euler(my_coord_and_euler) - """ return self._coord_and_euler @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.euler_nodes() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsEulerNodes(_Outputs): - """Intermediate class used to get outputs from euler_nodes operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.euler_nodes() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + euler_nodes operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.euler_nodes() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(euler_nodes._spec().outputs, op) - self._fields_container = Output(euler_nodes._spec().output_pin(0), 0, op) + self._fields_container = Output(euler_nodes._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.euler_nodes() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/heat_flux.py b/ansys/dpf/core/operators/result/heat_flux.py index f92ac608dc0..9ac9cd14e57 100644 --- a/ansys/dpf/core/operators/result/heat_flux.py +++ b/ansys/dpf/core/operators/result/heat_flux.py @@ -1,98 +1,274 @@ """ heat_flux -========= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class heat_flux(Operator): - """Read/compute heat flux by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.heat_flux() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.heat_flux(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, requested_location=None, 
config=None, server=None): - super().__init__(name="TF", config = config, server = server) + """Read/compute heat flux by calling the readers defined by the + datasources. Regarding the requested location and the input mesh + scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.heat_flux() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> 
op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.heat_flux( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="TF", config=config, server=server) self._inputs = InputsHeatFlux(self) self._outputs = OutputsHeatFlux(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute heat flux by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute heat flux by calling the readers defined by the + datasources. Regarding the requested location and the + input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "TF") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="TF", server=server) @property def inputs(self): @@ -100,301 +276,325 @@ def inputs(self): Returns -------- - inputs : InputsHeatFlux + inputs : InputsHeatFlux """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsHeatFlux + outputs : OutputsHeatFlux """ return super().outputs -#internal name: TF -#scripting name: heat_flux class InputsHeatFlux(_Inputs): - """Intermediate class used to connect user inputs to heat_flux operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.heat_flux() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> 
op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + heat_flux operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.heat_flux() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(heat_flux._spec().inputs, op) - self._time_scoping = Input(heat_flux._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(heat_flux._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(heat_flux._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(heat_flux._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(heat_flux._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(heat_flux._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(heat_flux._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(heat_flux._spec().input_pin(3), 3, op, 
-1) self._inputs.append(self._streams_container) - self._data_sources = Input(heat_flux._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(heat_flux._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(heat_flux._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input(heat_flux._spec().input_pin(5), 5, op, -1) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(heat_flux._spec().input_pin(7), 7, op, -1) + self._mesh = Input(heat_flux._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(heat_flux._spec().input_pin(9), 9, op, -1) + self._requested_location = Input(heat_flux._spec().input_pin(9), 9, op, -1) self._inputs.append(self._requested_location) - self._read_cyclic = Input(heat_flux._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(heat_flux._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) + self._read_beams = Input(heat_flux._spec().input_pin(21), 21, op, -1) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. + + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.heat_flux() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsHeatFlux(_Outputs): - """Intermediate class used to get outputs from heat_flux operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.heat_flux() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + heat_flux operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.heat_flux() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(heat_flux._spec().outputs, op) - self._fields_container = Output(heat_flux._spec().output_pin(0), 0, op) + self._fields_container = Output(heat_flux._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/heat_flux_X.py b/ansys/dpf/core/operators/result/heat_flux_X.py index d3b90ac338a..1c87341be19 100644 --- a/ansys/dpf/core/operators/result/heat_flux_X.py +++ b/ansys/dpf/core/operators/result/heat_flux_X.py @@ -1,98 +1,256 @@ """ heat_flux_X -=========== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class heat_flux_X(Operator): - """Read/compute heat flux X component of the vector (1st component) by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.heat_flux_X() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.heat_flux_X(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, requested_location=None, 
config=None, server=None): - super().__init__(name="TFX", config = config, server = server) + """Read/compute heat flux X component of the vector (1st component) by + calling the readers defined by the datasources. Regarding the + requested location and the input mesh scoping, the result location + can be Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location, default is nodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.heat_flux_X() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> 
op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.heat_flux_X( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="TFX", config=config, server=server) self._inputs = InputsHeatFluxX(self) self._outputs = OutputsHeatFluxX(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute heat flux X component of the vector (1st component) by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location, default is Nodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done 
and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute heat flux X component of the vector (1st component) by + calling the readers defined by the datasources. Regarding + the requested location and the input mesh scoping, the + result location can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location, default is nodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "TFX") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="TFX", server=server) @property def inputs(self): @@ -100,301 +258,299 @@ def inputs(self): Returns -------- - inputs : InputsHeatFluxX + inputs : InputsHeatFluxX """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsHeatFluxX + outputs : OutputsHeatFluxX """ return super().outputs -#internal name: TFX -#scripting name: heat_flux_X class InputsHeatFluxX(_Inputs): - """Intermediate class used to connect user inputs to heat_flux_X operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.heat_flux_X() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + heat_flux_X operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.heat_flux_X() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(heat_flux_X._spec().inputs, op) - self._time_scoping = Input(heat_flux_X._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(heat_flux_X._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(heat_flux_X._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(heat_flux_X._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(heat_flux_X._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(heat_flux_X._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(heat_flux_X._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(heat_flux_X._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(heat_flux_X._spec().input_pin(4), 4, op, -1) + self._data_sources = 
Input(heat_flux_X._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(heat_flux_X._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input(heat_flux_X._spec().input_pin(5), 5, op, -1) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(heat_flux_X._spec().input_pin(7), 7, op, -1) + self._mesh = Input(heat_flux_X._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(heat_flux_X._spec().input_pin(9), 9, op, -1) + self._requested_location = Input(heat_flux_X._spec().input_pin(9), 9, op, -1) self._inputs.append(self._requested_location) - self._read_cyclic = Input(heat_flux_X._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(heat_flux_X._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_X() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. 
The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_X() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_X() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_X() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_X() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_X() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
- - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_X() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location, default is Nodal + Requested location, default is nodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_X() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_X() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsHeatFluxX(_Outputs): - """Intermediate class used to get outputs from heat_flux_X operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.heat_flux_X() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + heat_flux_X operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.heat_flux_X() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(heat_flux_X._spec().outputs, op) - self._fields_container = Output(heat_flux_X._spec().output_pin(0), 0, op) + self._fields_container = Output(heat_flux_X._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_X() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/heat_flux_Y.py b/ansys/dpf/core/operators/result/heat_flux_Y.py index 84aa5f73a9c..a38b737ef9c 100644 --- a/ansys/dpf/core/operators/result/heat_flux_Y.py +++ b/ansys/dpf/core/operators/result/heat_flux_Y.py @@ -1,98 +1,256 @@ """ heat_flux_Y -=========== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class heat_flux_Y(Operator): - """Read/compute heat flux Y component of the vector (2nd component) by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.heat_flux_Y() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.heat_flux_Y(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, requested_location=None, 
config=None, server=None): - super().__init__(name="TFY", config = config, server = server) + """Read/compute heat flux Y component of the vector (2nd component) by + calling the readers defined by the datasources. Regarding the + requested location and the input mesh scoping, the result location + can be Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location, default is nodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.heat_flux_Y() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> 
op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.heat_flux_Y( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="TFY", config=config, server=server) self._inputs = InputsHeatFluxY(self) self._outputs = OutputsHeatFluxY(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute heat flux Y component of the vector (2nd component) by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location, default is Nodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done 
and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute heat flux Y component of the vector (2nd component) by + calling the readers defined by the datasources. Regarding + the requested location and the input mesh scoping, the + result location can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location, default is nodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "TFY") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="TFY", server=server) @property def inputs(self): @@ -100,301 +258,299 @@ def inputs(self): Returns -------- - inputs : InputsHeatFluxY + inputs : InputsHeatFluxY """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsHeatFluxY + outputs : OutputsHeatFluxY """ return super().outputs -#internal name: TFY -#scripting name: heat_flux_Y class InputsHeatFluxY(_Inputs): - """Intermediate class used to connect user inputs to heat_flux_Y operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.heat_flux_Y() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + heat_flux_Y operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.heat_flux_Y() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(heat_flux_Y._spec().inputs, op) - self._time_scoping = Input(heat_flux_Y._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(heat_flux_Y._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(heat_flux_Y._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(heat_flux_Y._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(heat_flux_Y._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(heat_flux_Y._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(heat_flux_Y._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(heat_flux_Y._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(heat_flux_Y._spec().input_pin(4), 4, op, -1) + self._data_sources = 
Input(heat_flux_Y._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(heat_flux_Y._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input(heat_flux_Y._spec().input_pin(5), 5, op, -1) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(heat_flux_Y._spec().input_pin(7), 7, op, -1) + self._mesh = Input(heat_flux_Y._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(heat_flux_Y._spec().input_pin(9), 9, op, -1) + self._requested_location = Input(heat_flux_Y._spec().input_pin(9), 9, op, -1) self._inputs.append(self._requested_location) - self._read_cyclic = Input(heat_flux_Y._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(heat_flux_Y._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_Y() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. 
The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_Y() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_Y() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_Y() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_Y() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_Y() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
- - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_Y() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location, default is Nodal + Requested location, default is nodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_Y() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_Y() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsHeatFluxY(_Outputs): - """Intermediate class used to get outputs from heat_flux_Y operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.heat_flux_Y() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + heat_flux_Y operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.heat_flux_Y() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(heat_flux_Y._spec().outputs, op) - self._fields_container = Output(heat_flux_Y._spec().output_pin(0), 0, op) + self._fields_container = Output(heat_flux_Y._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_Y() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/heat_flux_Z.py b/ansys/dpf/core/operators/result/heat_flux_Z.py index bfe292bc5e3..2ebcc8c5e6f 100644 --- a/ansys/dpf/core/operators/result/heat_flux_Z.py +++ b/ansys/dpf/core/operators/result/heat_flux_Z.py @@ -1,98 +1,256 @@ """ heat_flux_Z -=========== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class heat_flux_Z(Operator): - """Read/compute heat flux Z component of the vector (3rd component) by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.heat_flux_Z() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.heat_flux_Z(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, requested_location=None, 
config=None, server=None): - super().__init__(name="TFZ", config = config, server = server) + """Read/compute heat flux Z component of the vector (3rd component) by + calling the readers defined by the datasources. Regarding the + requested location and the input mesh scoping, the result location + can be Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location, default is nodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.heat_flux_Z() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> 
op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.heat_flux_Z( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="TFZ", config=config, server=server) self._inputs = InputsHeatFluxZ(self) self._outputs = OutputsHeatFluxZ(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute heat flux Z component of the vector (3rd component) by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location, default is Nodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done 
and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute heat flux Z component of the vector (3rd component) by + calling the readers defined by the datasources. Regarding + the requested location and the input mesh scoping, the + result location can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location, default is nodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "TFZ") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="TFZ", server=server) @property def inputs(self): @@ -100,301 +258,299 @@ def inputs(self): Returns -------- - inputs : InputsHeatFluxZ + inputs : InputsHeatFluxZ """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsHeatFluxZ + outputs : OutputsHeatFluxZ """ return super().outputs -#internal name: TFZ -#scripting name: heat_flux_Z class InputsHeatFluxZ(_Inputs): - """Intermediate class used to connect user inputs to heat_flux_Z operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.heat_flux_Z() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + heat_flux_Z operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.heat_flux_Z() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(heat_flux_Z._spec().inputs, op) - self._time_scoping = Input(heat_flux_Z._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(heat_flux_Z._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(heat_flux_Z._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(heat_flux_Z._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(heat_flux_Z._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(heat_flux_Z._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(heat_flux_Z._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(heat_flux_Z._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(heat_flux_Z._spec().input_pin(4), 4, op, -1) + self._data_sources = 
Input(heat_flux_Z._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(heat_flux_Z._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input(heat_flux_Z._spec().input_pin(5), 5, op, -1) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(heat_flux_Z._spec().input_pin(7), 7, op, -1) + self._mesh = Input(heat_flux_Z._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(heat_flux_Z._spec().input_pin(9), 9, op, -1) + self._requested_location = Input(heat_flux_Z._spec().input_pin(9), 9, op, -1) self._inputs.append(self._requested_location) - self._read_cyclic = Input(heat_flux_Z._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(heat_flux_Z._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_Z() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. 
The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_Z() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_Z() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_Z() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_Z() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_Z() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
- - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_Z() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location, default is Nodal + Requested location, default is nodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_Z() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_Z() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsHeatFluxZ(_Outputs): - """Intermediate class used to get outputs from heat_flux_Z operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.heat_flux_Z() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + heat_flux_Z operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.heat_flux_Z() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(heat_flux_Z._spec().outputs, op) - self._fields_container = Output(heat_flux_Z._spec().output_pin(0), 0, op) + self._fields_container = Output(heat_flux_Z._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.heat_flux_Z() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/hydrostatic_pressure.py b/ansys/dpf/core/operators/result/hydrostatic_pressure.py index e7b7e4e5335..3ff8545d7be 100644 --- a/ansys/dpf/core/operators/result/hydrostatic_pressure.py +++ b/ansys/dpf/core/operators/result/hydrostatic_pressure.py @@ -1,98 +1,274 @@ """ hydrostatic_pressure -==================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class hydrostatic_pressure(Operator): - """Read/compute element nodal hydrostatic pressure by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.hydrostatic_pressure() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.hydrostatic_pressure(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="ENL_HPRES", config = config, server = server) + """Read/compute element nodal hydrostatic pressure by calling the readers + defined by the datasources. Regarding the requested location and + the input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.hydrostatic_pressure() + + >>> # Make input connections + >>> 
my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.hydrostatic_pressure( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="ENL_HPRES", config=config, server=server) self._inputs = InputsHydrostaticPressure(self) self._outputs = OutputsHydrostaticPressure(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal hydrostatic pressure by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal hydrostatic pressure by calling the readers + defined by the datasources. Regarding the requested + location and the input mesh scoping, the result location + can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ENL_HPRES") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ENL_HPRES", server=server) @property def inputs(self): @@ -100,301 +276,337 @@ def inputs(self): Returns -------- - inputs : InputsHydrostaticPressure + inputs : InputsHydrostaticPressure """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsHydrostaticPressure + outputs : OutputsHydrostaticPressure """ return super().outputs -#internal name: ENL_HPRES -#scripting name: hydrostatic_pressure class InputsHydrostaticPressure(_Inputs): - """Intermediate class used to connect user inputs to hydrostatic_pressure operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.hydrostatic_pressure() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> 
op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + hydrostatic_pressure operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.hydrostatic_pressure() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(hydrostatic_pressure._spec().inputs, op) - self._time_scoping = Input(hydrostatic_pressure._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(hydrostatic_pressure._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(hydrostatic_pressure._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(hydrostatic_pressure._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(hydrostatic_pressure._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + hydrostatic_pressure._spec().input_pin(2), 2, op, -1 
+ ) self._inputs.append(self._fields_container) - self._streams_container = Input(hydrostatic_pressure._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + hydrostatic_pressure._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(hydrostatic_pressure._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(hydrostatic_pressure._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(hydrostatic_pressure._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + hydrostatic_pressure._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(hydrostatic_pressure._spec().input_pin(7), 7, op, -1) + self._mesh = Input(hydrostatic_pressure._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(hydrostatic_pressure._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + hydrostatic_pressure._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(hydrostatic_pressure._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + hydrostatic_pressure._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) + self._read_beams = Input(hydrostatic_pressure._spec().input_pin(21), 21, op, -1) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.hydrostatic_pressure() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.hydrostatic_pressure() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.hydrostatic_pressure() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.hydrostatic_pressure() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.hydrostatic_pressure() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.hydrostatic_pressure() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.hydrostatic_pressure() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.hydrostatic_pressure() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.hydrostatic_pressure() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. + + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.hydrostatic_pressure() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsHydrostaticPressure(_Outputs): - """Intermediate class used to get outputs from hydrostatic_pressure operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.hydrostatic_pressure() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + hydrostatic_pressure operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.hydrostatic_pressure() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(hydrostatic_pressure._spec().outputs, op) - self._fields_container = Output(hydrostatic_pressure._spec().output_pin(0), 0, op) + self._fields_container = Output( + hydrostatic_pressure._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.hydrostatic_pressure() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/incremental_energy.py b/ansys/dpf/core/operators/result/incremental_energy.py index 3afb475c56e..47b3bd4b629 100644 --- a/ansys/dpf/core/operators/result/incremental_energy.py +++ b/ansys/dpf/core/operators/result/incremental_energy.py @@ -1,92 +1,238 @@ """ incremental_energy -================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class incremental_energy(Operator): - """Read/compute incremental energy (magnetics) by calling the readers defined by the datasources. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.incremental_energy() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.incremental_energy(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="ENG_INC", config = config, server = server) + """Read/compute incremental energy (magnetics) by calling the readers + defined by the 
datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.incremental_energy() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = 
bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.incremental_energy( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="ENG_INC", config=config, server=server) self._inputs = InputsIncrementalEnergy(self) self._outputs = OutputsIncrementalEnergy(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute incremental energy (magnetics) 
by calling the readers defined by the datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = 
"""Read/compute incremental energy (magnetics) by calling the readers + defined by the datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 
cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ENG_INC") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ENG_INC", server=server) @property def inputs(self): @@ -94,275 +240,281 @@ def inputs(self): Returns -------- - inputs : InputsIncrementalEnergy + inputs : InputsIncrementalEnergy """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsIncrementalEnergy + outputs : OutputsIncrementalEnergy """ return super().outputs -#internal name: ENG_INC -#scripting name: incremental_energy class InputsIncrementalEnergy(_Inputs): - """Intermediate class used to connect user inputs to incremental_energy operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.incremental_energy() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> 
op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + incremental_energy operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.incremental_energy() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(incremental_energy._spec().inputs, op) - self._time_scoping = Input(incremental_energy._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(incremental_energy._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(incremental_energy._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(incremental_energy._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(incremental_energy._spec().input_pin(2), 2, op, -1) + 
self._fields_container = Input( + incremental_energy._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(incremental_energy._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + incremental_energy._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(incremental_energy._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(incremental_energy._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(incremental_energy._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + incremental_energy._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(incremental_energy._spec().input_pin(7), 7, op, -1) + self._mesh = Input(incremental_energy._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(incremental_energy._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(incremental_energy._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.incremental_energy() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.incremental_energy() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.incremental_energy() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.incremental_energy() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.incremental_energy() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.incremental_energy() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.incremental_energy() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.incremental_energy() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsIncrementalEnergy(_Outputs): - """Intermediate class used to get outputs from incremental_energy operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.incremental_energy() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + incremental_energy operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.incremental_energy() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(incremental_energy._spec().outputs, op) - self._fields_container = Output(incremental_energy._spec().output_pin(0), 0, op) + self._fields_container = Output(incremental_energy._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.incremental_energy() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/joint_force_reaction.py b/ansys/dpf/core/operators/result/joint_force_reaction.py new file mode 100644 index 00000000000..1c11e9c6ecc --- /dev/null +++ b/ansys/dpf/core/operators/result/joint_force_reaction.py @@ -0,0 +1,524 @@ +""" +joint_force_reaction +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class joint_force_reaction(Operator): + """Read/compute joint force reaction by calling the readers defined by + the datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.joint_force_reaction() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.joint_force_reaction( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... 
fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="JFL", config=config, server=server) + self._inputs = InputsJointForceReaction(self) + self._outputs = OutputsJointForceReaction(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if mesh_scoping is not None: + self.inputs.mesh_scoping.connect(mesh_scoping) + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + + @staticmethod + def _spec(): + description = """Read/compute joint force reaction by calling the readers defined by + the datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", 
+ type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="JFL", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsJointForceReaction + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsJointForceReaction + """ + return super().outputs + + +class InputsJointForceReaction(_Inputs): + """Intermediate class used to connect user inputs to + joint_force_reaction operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_force_reaction() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """ + + def __init__(self, op: Operator): + super().__init__(joint_force_reaction._spec().inputs, op) + self._time_scoping = Input(joint_force_reaction._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._time_scoping) + self._mesh_scoping = 
Input(joint_force_reaction._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._mesh_scoping) + self._fields_container = Input( + joint_force_reaction._spec().input_pin(2), 2, op, -1 + ) + self._inputs.append(self._fields_container) + self._streams_container = Input( + joint_force_reaction._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input(joint_force_reaction._spec().input_pin(4), 4, op, -1) + self._inputs.append(self._data_sources) + self._bool_rotate_to_global = Input( + joint_force_reaction._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._bool_rotate_to_global) + self._mesh = Input(joint_force_reaction._spec().input_pin(7), 7, op, -1) + self._inputs.append(self._mesh) + self._read_cyclic = Input( + joint_force_reaction._spec().input_pin(14), 14, op, -1 + ) + self._inputs.append(self._read_cyclic) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_force_reaction() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def mesh_scoping(self): + """Allows to connect mesh_scoping input to the operator. + + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains + + Parameters + ---------- + my_mesh_scoping : ScopingsContainer or Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_force_reaction() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> # or + >>> op.inputs.mesh_scoping(my_mesh_scoping) + """ + return self._mesh_scoping + + @property + def fields_container(self): + """Allows to connect fields_container input to the operator. + + Fields container already allocated modified + inplace + + Parameters + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_force_reaction() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> # or + >>> op.inputs.fields_container(my_fields_container) + """ + return self._fields_container + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Result file container allowed to be kept open + to cache data + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_force_reaction() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. 
+ + Result file path container, used if no + streams are set + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_force_reaction() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def bool_rotate_to_global(self): + """Allows to connect bool_rotate_to_global input to the operator. + + If true the field is rotated to global + coordinate system (default true) + + Parameters + ---------- + my_bool_rotate_to_global : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_force_reaction() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> # or + >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) + """ + return self._bool_rotate_to_global + + @property + def mesh(self): + """Allows to connect mesh input to the operator. + + Prevents from reading the mesh in the result + files + + Parameters + ---------- + my_mesh : MeshedRegion or MeshesContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_force_reaction() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + @property + def read_cyclic(self): + """Allows to connect read_cyclic input to the operator. 
+ + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + Parameters + ---------- + my_read_cyclic : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_force_reaction() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> # or + >>> op.inputs.read_cyclic(my_read_cyclic) + """ + return self._read_cyclic + + +class OutputsJointForceReaction(_Outputs): + """Intermediate class used to get outputs from + joint_force_reaction operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_force_reaction() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(joint_force_reaction._spec().outputs, op) + self._fields_container = Output( + joint_force_reaction._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_force_reaction() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/joint_moment_reaction.py b/ansys/dpf/core/operators/result/joint_moment_reaction.py new file mode 100644 index 00000000000..bbcfb398889 --- /dev/null +++ b/ansys/dpf/core/operators/result/joint_moment_reaction.py @@ -0,0 +1,530 @@ +""" +joint_moment_reaction +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class joint_moment_reaction(Operator): + """Read/compute joint moment reaction by calling the readers defined by + the datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.joint_moment_reaction() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> 
op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.joint_moment_reaction( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="JML", config=config, server=server) + self._inputs = InputsJointMomentReaction(self) + self._outputs = OutputsJointMomentReaction(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if mesh_scoping is not None: + self.inputs.mesh_scoping.connect(mesh_scoping) + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + + @staticmethod + def _spec(): + description = """Read/compute joint moment reaction by calling the readers defined by + the datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="JML", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsJointMomentReaction + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsJointMomentReaction + """ + return super().outputs + + +class InputsJointMomentReaction(_Inputs): + """Intermediate class used to connect user inputs to + joint_moment_reaction operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_moment_reaction() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """ + + def __init__(self, op: Operator): + super().__init__(joint_moment_reaction._spec().inputs, op) + self._time_scoping = Input( + joint_moment_reaction._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._mesh_scoping = Input( + joint_moment_reaction._spec().input_pin(1), 1, op, -1 + ) + self._inputs.append(self._mesh_scoping) + self._fields_container = Input( + joint_moment_reaction._spec().input_pin(2), 2, op, -1 + ) + self._inputs.append(self._fields_container) + 
self._streams_container = Input( + joint_moment_reaction._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + joint_moment_reaction._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._bool_rotate_to_global = Input( + joint_moment_reaction._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._bool_rotate_to_global) + self._mesh = Input(joint_moment_reaction._spec().input_pin(7), 7, op, -1) + self._inputs.append(self._mesh) + self._read_cyclic = Input( + joint_moment_reaction._spec().input_pin(14), 14, op, -1 + ) + self._inputs.append(self._read_cyclic) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_moment_reaction() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def mesh_scoping(self): + """Allows to connect mesh_scoping input to the operator. + + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains + + Parameters + ---------- + my_mesh_scoping : ScopingsContainer or Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_moment_reaction() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> # or + >>> op.inputs.mesh_scoping(my_mesh_scoping) + """ + return self._mesh_scoping + + @property + def fields_container(self): + """Allows to connect fields_container input to the operator. + + Fields container already allocated modified + inplace + + Parameters + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_moment_reaction() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> # or + >>> op.inputs.fields_container(my_fields_container) + """ + return self._fields_container + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Result file container allowed to be kept open + to cache data + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_moment_reaction() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. 
+ + Result file path container, used if no + streams are set + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_moment_reaction() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def bool_rotate_to_global(self): + """Allows to connect bool_rotate_to_global input to the operator. + + If true the field is rotated to global + coordinate system (default true) + + Parameters + ---------- + my_bool_rotate_to_global : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_moment_reaction() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> # or + >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) + """ + return self._bool_rotate_to_global + + @property + def mesh(self): + """Allows to connect mesh input to the operator. + + Prevents from reading the mesh in the result + files + + Parameters + ---------- + my_mesh : MeshedRegion or MeshesContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_moment_reaction() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + @property + def read_cyclic(self): + """Allows to connect read_cyclic input to the operator. 
+ + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + Parameters + ---------- + my_read_cyclic : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_moment_reaction() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> # or + >>> op.inputs.read_cyclic(my_read_cyclic) + """ + return self._read_cyclic + + +class OutputsJointMomentReaction(_Outputs): + """Intermediate class used to get outputs from + joint_moment_reaction operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_moment_reaction() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(joint_moment_reaction._spec().outputs, op) + self._fields_container = Output( + joint_moment_reaction._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_moment_reaction() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/joint_relative_acceleration.py b/ansys/dpf/core/operators/result/joint_relative_acceleration.py new file mode 100644 index 00000000000..c211bbe126d --- /dev/null +++ b/ansys/dpf/core/operators/result/joint_relative_acceleration.py @@ -0,0 +1,530 @@ +""" +joint_relative_acceleration +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class joint_relative_acceleration(Operator): + """Read/compute joint relative acceleration by calling the readers + defined by the datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.joint_relative_acceleration() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + 
>>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.joint_relative_acceleration( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="JA", config=config, server=server) + self._inputs = InputsJointRelativeAcceleration(self) + self._outputs = OutputsJointRelativeAcceleration(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if mesh_scoping is not None: + self.inputs.mesh_scoping.connect(mesh_scoping) + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + + @staticmethod + def _spec(): + description = """Read/compute joint relative acceleration by calling the readers + defined by the datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="JA", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsJointRelativeAcceleration + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsJointRelativeAcceleration + """ + return super().outputs + + +class InputsJointRelativeAcceleration(_Inputs): + """Intermediate class used to connect user inputs to + joint_relative_acceleration operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_acceleration() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """ + + def __init__(self, op: Operator): + super().__init__(joint_relative_acceleration._spec().inputs, op) + self._time_scoping = Input( + joint_relative_acceleration._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._mesh_scoping = Input( + joint_relative_acceleration._spec().input_pin(1), 1, op, -1 + ) + self._inputs.append(self._mesh_scoping) + self._fields_container = Input( + joint_relative_acceleration._spec().input_pin(2), 2, op, -1 + ) + 
self._inputs.append(self._fields_container) + self._streams_container = Input( + joint_relative_acceleration._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + joint_relative_acceleration._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._bool_rotate_to_global = Input( + joint_relative_acceleration._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._bool_rotate_to_global) + self._mesh = Input(joint_relative_acceleration._spec().input_pin(7), 7, op, -1) + self._inputs.append(self._mesh) + self._read_cyclic = Input( + joint_relative_acceleration._spec().input_pin(14), 14, op, -1 + ) + self._inputs.append(self._read_cyclic) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_acceleration() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def mesh_scoping(self): + """Allows to connect mesh_scoping input to the operator. + + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains + + Parameters + ---------- + my_mesh_scoping : ScopingsContainer or Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_acceleration() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> # or + >>> op.inputs.mesh_scoping(my_mesh_scoping) + """ + return self._mesh_scoping + + @property + def fields_container(self): + """Allows to connect fields_container input to the operator. + + Fields container already allocated modified + inplace + + Parameters + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_acceleration() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> # or + >>> op.inputs.fields_container(my_fields_container) + """ + return self._fields_container + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Result file container allowed to be kept open + to cache data + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_acceleration() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. 
+ + Result file path container, used if no + streams are set + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_acceleration() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def bool_rotate_to_global(self): + """Allows to connect bool_rotate_to_global input to the operator. + + If true the field is rotated to global + coordinate system (default true) + + Parameters + ---------- + my_bool_rotate_to_global : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_acceleration() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> # or + >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) + """ + return self._bool_rotate_to_global + + @property + def mesh(self): + """Allows to connect mesh input to the operator. + + Prevents from reading the mesh in the result + files + + Parameters + ---------- + my_mesh : MeshedRegion or MeshesContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_acceleration() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + @property + def read_cyclic(self): + """Allows to connect read_cyclic input to the operator. 
+ + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + Parameters + ---------- + my_read_cyclic : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_acceleration() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> # or + >>> op.inputs.read_cyclic(my_read_cyclic) + """ + return self._read_cyclic + + +class OutputsJointRelativeAcceleration(_Outputs): + """Intermediate class used to get outputs from + joint_relative_acceleration operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_acceleration() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(joint_relative_acceleration._spec().outputs, op) + self._fields_container = Output( + joint_relative_acceleration._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_acceleration() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/joint_relative_angular_acceleration.py b/ansys/dpf/core/operators/result/joint_relative_angular_acceleration.py new file mode 100644 index 00000000000..9ce7a0f6526 --- /dev/null +++ b/ansys/dpf/core/operators/result/joint_relative_angular_acceleration.py @@ -0,0 +1,532 @@ +""" +joint_relative_angular_acceleration +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class joint_relative_angular_acceleration(Operator): + """Read/compute joint relative angular acceleration by calling the + readers defined by the datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.joint_relative_angular_acceleration() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = 
dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.joint_relative_angular_acceleration( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="JDOMG", config=config, server=server) + self._inputs = InputsJointRelativeAngularAcceleration(self) + self._outputs = OutputsJointRelativeAngularAcceleration(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if mesh_scoping is not None: + self.inputs.mesh_scoping.connect(mesh_scoping) + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + + @staticmethod + def _spec(): + description = """Read/compute joint relative angular acceleration by calling the + readers defined by the datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. 
+ the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="JDOMG", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsJointRelativeAngularAcceleration + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsJointRelativeAngularAcceleration + """ + return super().outputs + + +class InputsJointRelativeAngularAcceleration(_Inputs): + """Intermediate class used to connect user inputs to + joint_relative_angular_acceleration operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_angular_acceleration() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """ + + def __init__(self, op: Operator): + super().__init__(joint_relative_angular_acceleration._spec().inputs, op) + self._time_scoping = Input( + joint_relative_angular_acceleration._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + 
self._mesh_scoping = Input( + joint_relative_angular_acceleration._spec().input_pin(1), 1, op, -1 + ) + self._inputs.append(self._mesh_scoping) + self._fields_container = Input( + joint_relative_angular_acceleration._spec().input_pin(2), 2, op, -1 + ) + self._inputs.append(self._fields_container) + self._streams_container = Input( + joint_relative_angular_acceleration._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + joint_relative_angular_acceleration._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._bool_rotate_to_global = Input( + joint_relative_angular_acceleration._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._bool_rotate_to_global) + self._mesh = Input( + joint_relative_angular_acceleration._spec().input_pin(7), 7, op, -1 + ) + self._inputs.append(self._mesh) + self._read_cyclic = Input( + joint_relative_angular_acceleration._spec().input_pin(14), 14, op, -1 + ) + self._inputs.append(self._read_cyclic) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_angular_acceleration() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def mesh_scoping(self): + """Allows to connect mesh_scoping input to the operator. + + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains + + Parameters + ---------- + my_mesh_scoping : ScopingsContainer or Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_angular_acceleration() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> # or + >>> op.inputs.mesh_scoping(my_mesh_scoping) + """ + return self._mesh_scoping + + @property + def fields_container(self): + """Allows to connect fields_container input to the operator. + + Fields container already allocated modified + inplace + + Parameters + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_angular_acceleration() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> # or + >>> op.inputs.fields_container(my_fields_container) + """ + return self._fields_container + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Result file container allowed to be kept open + to cache data + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_angular_acceleration() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. 
+ + Result file path container, used if no + streams are set + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_angular_acceleration() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def bool_rotate_to_global(self): + """Allows to connect bool_rotate_to_global input to the operator. + + If true the field is rotated to global + coordinate system (default true) + + Parameters + ---------- + my_bool_rotate_to_global : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_angular_acceleration() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> # or + >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) + """ + return self._bool_rotate_to_global + + @property + def mesh(self): + """Allows to connect mesh input to the operator. + + Prevents from reading the mesh in the result + files + + Parameters + ---------- + my_mesh : MeshedRegion or MeshesContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_angular_acceleration() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + @property + def read_cyclic(self): + """Allows to connect read_cyclic input to the operator. 
+ + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + Parameters + ---------- + my_read_cyclic : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_angular_acceleration() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> # or + >>> op.inputs.read_cyclic(my_read_cyclic) + """ + return self._read_cyclic + + +class OutputsJointRelativeAngularAcceleration(_Outputs): + """Intermediate class used to get outputs from + joint_relative_angular_acceleration operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_angular_acceleration() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(joint_relative_angular_acceleration._spec().outputs, op) + self._fields_container = Output( + joint_relative_angular_acceleration._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_angular_acceleration() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/joint_relative_angular_velocity.py b/ansys/dpf/core/operators/result/joint_relative_angular_velocity.py new file mode 100644 index 00000000000..a6e46703660 --- /dev/null +++ b/ansys/dpf/core/operators/result/joint_relative_angular_velocity.py @@ -0,0 +1,532 @@ +""" +joint_relative_angular_velocity +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class joint_relative_angular_velocity(Operator): + """Read/compute joint relative angular velocity by calling the readers + defined by the datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.joint_relative_angular_velocity() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.joint_relative_angular_velocity( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... 
fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="JOMG", config=config, server=server) + self._inputs = InputsJointRelativeAngularVelocity(self) + self._outputs = OutputsJointRelativeAngularVelocity(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if mesh_scoping is not None: + self.inputs.mesh_scoping.connect(mesh_scoping) + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + + @staticmethod + def _spec(): + description = """Read/compute joint relative angular velocity by calling the readers + defined by the datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: 
PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="JOMG", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsJointRelativeAngularVelocity + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsJointRelativeAngularVelocity + """ + return super().outputs + + +class InputsJointRelativeAngularVelocity(_Inputs): + """Intermediate class used to connect user inputs to + joint_relative_angular_velocity operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_angular_velocity() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """ + + def __init__(self, op: Operator): + super().__init__(joint_relative_angular_velocity._spec().inputs, op) + self._time_scoping = Input( + joint_relative_angular_velocity._spec().input_pin(0), 0, 
op, -1 + ) + self._inputs.append(self._time_scoping) + self._mesh_scoping = Input( + joint_relative_angular_velocity._spec().input_pin(1), 1, op, -1 + ) + self._inputs.append(self._mesh_scoping) + self._fields_container = Input( + joint_relative_angular_velocity._spec().input_pin(2), 2, op, -1 + ) + self._inputs.append(self._fields_container) + self._streams_container = Input( + joint_relative_angular_velocity._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + joint_relative_angular_velocity._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._bool_rotate_to_global = Input( + joint_relative_angular_velocity._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._bool_rotate_to_global) + self._mesh = Input( + joint_relative_angular_velocity._spec().input_pin(7), 7, op, -1 + ) + self._inputs.append(self._mesh) + self._read_cyclic = Input( + joint_relative_angular_velocity._spec().input_pin(14), 14, op, -1 + ) + self._inputs.append(self._read_cyclic) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_angular_velocity() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def mesh_scoping(self): + """Allows to connect mesh_scoping input to the operator. + + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains + + Parameters + ---------- + my_mesh_scoping : ScopingsContainer or Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_angular_velocity() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> # or + >>> op.inputs.mesh_scoping(my_mesh_scoping) + """ + return self._mesh_scoping + + @property + def fields_container(self): + """Allows to connect fields_container input to the operator. + + Fields container already allocated modified + inplace + + Parameters + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_angular_velocity() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> # or + >>> op.inputs.fields_container(my_fields_container) + """ + return self._fields_container + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Result file container allowed to be kept open + to cache data + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_angular_velocity() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. 
+ + Result file path container, used if no + streams are set + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_angular_velocity() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def bool_rotate_to_global(self): + """Allows to connect bool_rotate_to_global input to the operator. + + If true the field is rotated to global + coordinate system (default true) + + Parameters + ---------- + my_bool_rotate_to_global : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_angular_velocity() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> # or + >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) + """ + return self._bool_rotate_to_global + + @property + def mesh(self): + """Allows to connect mesh input to the operator. + + Prevents from reading the mesh in the result + files + + Parameters + ---------- + my_mesh : MeshedRegion or MeshesContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_angular_velocity() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + @property + def read_cyclic(self): + """Allows to connect read_cyclic input to the operator. 
+ + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + Parameters + ---------- + my_read_cyclic : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_angular_velocity() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> # or + >>> op.inputs.read_cyclic(my_read_cyclic) + """ + return self._read_cyclic + + +class OutputsJointRelativeAngularVelocity(_Outputs): + """Intermediate class used to get outputs from + joint_relative_angular_velocity operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_angular_velocity() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(joint_relative_angular_velocity._spec().outputs, op) + self._fields_container = Output( + joint_relative_angular_velocity._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_angular_velocity() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/joint_relative_displacement.py b/ansys/dpf/core/operators/result/joint_relative_displacement.py new file mode 100644 index 00000000000..34be963b844 --- /dev/null +++ b/ansys/dpf/core/operators/result/joint_relative_displacement.py @@ -0,0 +1,530 @@ +""" +joint_relative_displacement +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class joint_relative_displacement(Operator): + """Read/compute joint relative displacement by calling the readers + defined by the datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.joint_relative_displacement() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + 
>>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.joint_relative_displacement( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="JU", config=config, server=server) + self._inputs = InputsJointRelativeDisplacement(self) + self._outputs = OutputsJointRelativeDisplacement(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if mesh_scoping is not None: + self.inputs.mesh_scoping.connect(mesh_scoping) + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + + @staticmethod + def _spec(): + description = """Read/compute joint relative displacement by calling the readers + defined by the datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="JU", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsJointRelativeDisplacement + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsJointRelativeDisplacement + """ + return super().outputs + + +class InputsJointRelativeDisplacement(_Inputs): + """Intermediate class used to connect user inputs to + joint_relative_displacement operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_displacement() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """ + + def __init__(self, op: Operator): + super().__init__(joint_relative_displacement._spec().inputs, op) + self._time_scoping = Input( + joint_relative_displacement._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._mesh_scoping = Input( + joint_relative_displacement._spec().input_pin(1), 1, op, -1 + ) + self._inputs.append(self._mesh_scoping) + self._fields_container = Input( + joint_relative_displacement._spec().input_pin(2), 2, op, -1 + ) + 
self._inputs.append(self._fields_container) + self._streams_container = Input( + joint_relative_displacement._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + joint_relative_displacement._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._bool_rotate_to_global = Input( + joint_relative_displacement._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._bool_rotate_to_global) + self._mesh = Input(joint_relative_displacement._spec().input_pin(7), 7, op, -1) + self._inputs.append(self._mesh) + self._read_cyclic = Input( + joint_relative_displacement._spec().input_pin(14), 14, op, -1 + ) + self._inputs.append(self._read_cyclic) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_displacement() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def mesh_scoping(self): + """Allows to connect mesh_scoping input to the operator. + + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains + + Parameters + ---------- + my_mesh_scoping : ScopingsContainer or Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_displacement() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> # or + >>> op.inputs.mesh_scoping(my_mesh_scoping) + """ + return self._mesh_scoping + + @property + def fields_container(self): + """Allows to connect fields_container input to the operator. + + Fields container already allocated modified + inplace + + Parameters + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_displacement() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> # or + >>> op.inputs.fields_container(my_fields_container) + """ + return self._fields_container + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Result file container allowed to be kept open + to cache data + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_displacement() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. 
+ + Result file path container, used if no + streams are set + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_displacement() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def bool_rotate_to_global(self): + """Allows to connect bool_rotate_to_global input to the operator. + + If true the field is rotated to global + coordinate system (default true) + + Parameters + ---------- + my_bool_rotate_to_global : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_displacement() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> # or + >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) + """ + return self._bool_rotate_to_global + + @property + def mesh(self): + """Allows to connect mesh input to the operator. + + Prevents from reading the mesh in the result + files + + Parameters + ---------- + my_mesh : MeshedRegion or MeshesContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_displacement() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + @property + def read_cyclic(self): + """Allows to connect read_cyclic input to the operator. 
+ + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + Parameters + ---------- + my_read_cyclic : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_displacement() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> # or + >>> op.inputs.read_cyclic(my_read_cyclic) + """ + return self._read_cyclic + + +class OutputsJointRelativeDisplacement(_Outputs): + """Intermediate class used to get outputs from + joint_relative_displacement operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_displacement() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(joint_relative_displacement._spec().outputs, op) + self._fields_container = Output( + joint_relative_displacement._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_displacement() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/joint_relative_rotation.py b/ansys/dpf/core/operators/result/joint_relative_rotation.py new file mode 100644 index 00000000000..65b354aba74 --- /dev/null +++ b/ansys/dpf/core/operators/result/joint_relative_rotation.py @@ -0,0 +1,530 @@ +""" +joint_relative_rotation +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class joint_relative_rotation(Operator): + """Read/compute joint relative rotation by calling the readers defined by + the datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.joint_relative_rotation() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> 
op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.joint_relative_rotation( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="JR", config=config, server=server) + self._inputs = InputsJointRelativeRotation(self) + self._outputs = OutputsJointRelativeRotation(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if mesh_scoping is not None: + self.inputs.mesh_scoping.connect(mesh_scoping) + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + + @staticmethod + def _spec(): + description = """Read/compute joint relative rotation by calling the readers defined by + the datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="JR", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsJointRelativeRotation + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsJointRelativeRotation + """ + return super().outputs + + +class InputsJointRelativeRotation(_Inputs): + """Intermediate class used to connect user inputs to + joint_relative_rotation operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_rotation() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """ + + def __init__(self, op: Operator): + super().__init__(joint_relative_rotation._spec().inputs, op) + self._time_scoping = Input( + joint_relative_rotation._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._mesh_scoping = Input( + joint_relative_rotation._spec().input_pin(1), 1, op, -1 + ) + self._inputs.append(self._mesh_scoping) + self._fields_container = Input( + joint_relative_rotation._spec().input_pin(2), 2, op, -1 + ) + self._inputs.append(self._fields_container) + 
self._streams_container = Input( + joint_relative_rotation._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + joint_relative_rotation._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._bool_rotate_to_global = Input( + joint_relative_rotation._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._bool_rotate_to_global) + self._mesh = Input(joint_relative_rotation._spec().input_pin(7), 7, op, -1) + self._inputs.append(self._mesh) + self._read_cyclic = Input( + joint_relative_rotation._spec().input_pin(14), 14, op, -1 + ) + self._inputs.append(self._read_cyclic) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_rotation() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def mesh_scoping(self): + """Allows to connect mesh_scoping input to the operator. + + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains + + Parameters + ---------- + my_mesh_scoping : ScopingsContainer or Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_rotation() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> # or + >>> op.inputs.mesh_scoping(my_mesh_scoping) + """ + return self._mesh_scoping + + @property + def fields_container(self): + """Allows to connect fields_container input to the operator. + + Fields container already allocated modified + inplace + + Parameters + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_rotation() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> # or + >>> op.inputs.fields_container(my_fields_container) + """ + return self._fields_container + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Result file container allowed to be kept open + to cache data + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_rotation() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. 
+ + Result file path container, used if no + streams are set + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_rotation() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def bool_rotate_to_global(self): + """Allows to connect bool_rotate_to_global input to the operator. + + If true the field is rotated to global + coordinate system (default true) + + Parameters + ---------- + my_bool_rotate_to_global : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_rotation() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> # or + >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) + """ + return self._bool_rotate_to_global + + @property + def mesh(self): + """Allows to connect mesh input to the operator. + + Prevents from reading the mesh in the result + files + + Parameters + ---------- + my_mesh : MeshedRegion or MeshesContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_rotation() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + @property + def read_cyclic(self): + """Allows to connect read_cyclic input to the operator. 
+ + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + Parameters + ---------- + my_read_cyclic : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_rotation() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> # or + >>> op.inputs.read_cyclic(my_read_cyclic) + """ + return self._read_cyclic + + +class OutputsJointRelativeRotation(_Outputs): + """Intermediate class used to get outputs from + joint_relative_rotation operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_rotation() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(joint_relative_rotation._spec().outputs, op) + self._fields_container = Output( + joint_relative_rotation._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_rotation() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/joint_relative_velocity.py b/ansys/dpf/core/operators/result/joint_relative_velocity.py new file mode 100644 index 00000000000..93851b1383a --- /dev/null +++ b/ansys/dpf/core/operators/result/joint_relative_velocity.py @@ -0,0 +1,530 @@ +""" +joint_relative_velocity +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class joint_relative_velocity(Operator): + """Read/compute joint relative velocity by calling the readers defined by + the datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.joint_relative_velocity() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> 
op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.joint_relative_velocity( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="JV", config=config, server=server) + self._inputs = InputsJointRelativeVelocity(self) + self._outputs = OutputsJointRelativeVelocity(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if mesh_scoping is not None: + self.inputs.mesh_scoping.connect(mesh_scoping) + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + + @staticmethod + def _spec(): + description = """Read/compute joint relative velocity by calling the readers defined by + the datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="JV", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsJointRelativeVelocity + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsJointRelativeVelocity + """ + return super().outputs + + +class InputsJointRelativeVelocity(_Inputs): + """Intermediate class used to connect user inputs to + joint_relative_velocity operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_velocity() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """ + + def __init__(self, op: Operator): + super().__init__(joint_relative_velocity._spec().inputs, op) + self._time_scoping = Input( + joint_relative_velocity._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._mesh_scoping = Input( + joint_relative_velocity._spec().input_pin(1), 1, op, -1 + ) + self._inputs.append(self._mesh_scoping) + self._fields_container = Input( + joint_relative_velocity._spec().input_pin(2), 2, op, -1 + ) + self._inputs.append(self._fields_container) + 
self._streams_container = Input( + joint_relative_velocity._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input( + joint_relative_velocity._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._bool_rotate_to_global = Input( + joint_relative_velocity._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._bool_rotate_to_global) + self._mesh = Input(joint_relative_velocity._spec().input_pin(7), 7, op, -1) + self._inputs.append(self._mesh) + self._read_cyclic = Input( + joint_relative_velocity._spec().input_pin(14), 14, op, -1 + ) + self._inputs.append(self._read_cyclic) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_velocity() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def mesh_scoping(self): + """Allows to connect mesh_scoping input to the operator. + + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains + + Parameters + ---------- + my_mesh_scoping : ScopingsContainer or Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_velocity() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> # or + >>> op.inputs.mesh_scoping(my_mesh_scoping) + """ + return self._mesh_scoping + + @property + def fields_container(self): + """Allows to connect fields_container input to the operator. + + Fields container already allocated modified + inplace + + Parameters + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_velocity() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> # or + >>> op.inputs.fields_container(my_fields_container) + """ + return self._fields_container + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Result file container allowed to be kept open + to cache data + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_velocity() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. 
+ + Result file path container, used if no + streams are set + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_velocity() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def bool_rotate_to_global(self): + """Allows to connect bool_rotate_to_global input to the operator. + + If true the field is rotated to global + coordinate system (default true) + + Parameters + ---------- + my_bool_rotate_to_global : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_velocity() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> # or + >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) + """ + return self._bool_rotate_to_global + + @property + def mesh(self): + """Allows to connect mesh input to the operator. + + Prevents from reading the mesh in the result + files + + Parameters + ---------- + my_mesh : MeshedRegion or MeshesContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_velocity() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + @property + def read_cyclic(self): + """Allows to connect read_cyclic input to the operator. 
+ + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + Parameters + ---------- + my_read_cyclic : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_velocity() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> # or + >>> op.inputs.read_cyclic(my_read_cyclic) + """ + return self._read_cyclic + + +class OutputsJointRelativeVelocity(_Outputs): + """Intermediate class used to get outputs from + joint_relative_velocity operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_velocity() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(joint_relative_velocity._spec().outputs, op) + self._fields_container = Output( + joint_relative_velocity._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.joint_relative_velocity() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/kinetic_energy.py b/ansys/dpf/core/operators/result/kinetic_energy.py index e148669ee3f..6dd04eb1cda 100644 --- a/ansys/dpf/core/operators/result/kinetic_energy.py +++ b/ansys/dpf/core/operators/result/kinetic_energy.py @@ -1,92 +1,238 @@ """ kinetic_energy -============== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class kinetic_energy(Operator): - """Read/compute kinetic energy by calling the readers defined by the datasources. - - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.kinetic_energy() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate 
operator and connect inputs in one line - >>> op = dpf.operators.result.kinetic_energy(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="ENG_KE", config = config, server = server) + """Read/compute kinetic energy by calling the readers defined by the + datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.kinetic_energy() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> 
op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.kinetic_energy( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="ENG_KE", config=config, server=server) self._inputs = InputsKineticEnergy(self) self._outputs = OutputsKineticEnergy(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute kinetic energy by calling the readers defined by the datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. 
Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute kinetic energy by calling the readers defined by the + datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + 
document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ENG_KE") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ENG_KE", server=server) @property def inputs(self): @@ -94,275 +240,277 @@ def inputs(self): Returns -------- - inputs : InputsKineticEnergy + inputs : InputsKineticEnergy """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsKineticEnergy + outputs : OutputsKineticEnergy """ return super().outputs -#internal name: ENG_KE -#scripting name: kinetic_energy class InputsKineticEnergy(_Inputs): - """Intermediate class used to connect user inputs to kinetic_energy operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.kinetic_energy() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + kinetic_energy operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.kinetic_energy() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(kinetic_energy._spec().inputs, op) - self._time_scoping = Input(kinetic_energy._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(kinetic_energy._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(kinetic_energy._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(kinetic_energy._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(kinetic_energy._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(kinetic_energy._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(kinetic_energy._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(kinetic_energy._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(kinetic_energy._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(kinetic_energy._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - 
self._bool_rotate_to_global = Input(kinetic_energy._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + kinetic_energy._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(kinetic_energy._spec().input_pin(7), 7, op, -1) + self._mesh = Input(kinetic_energy._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(kinetic_energy._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(kinetic_energy._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.kinetic_energy() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.kinetic_energy() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.kinetic_energy() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.kinetic_energy() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.kinetic_energy() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.kinetic_energy() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.kinetic_energy() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.kinetic_energy() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsKineticEnergy(_Outputs): - """Intermediate class used to get outputs from kinetic_energy operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.kinetic_energy() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + kinetic_energy operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.kinetic_energy() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(kinetic_energy._spec().outputs, op) - self._fields_container = Output(kinetic_energy._spec().output_pin(0), 0, op) + self._fields_container = Output(kinetic_energy._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.kinetic_energy() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/material_property_of_element.py b/ansys/dpf/core/operators/result/material_property_of_element.py index cee43d6ef53..4bb17a4f5da 100644 --- a/ansys/dpf/core/operators/result/material_property_of_element.py +++ b/ansys/dpf/core/operators/result/material_property_of_element.py @@ -1,66 +1,105 @@ """ material_property_of_element -============================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class material_property_of_element(Operator): - """ Load the appropriate operator based on the data sources and get material properties + """Load the appropriate operator based on the data sources and get + material properties + + Parameters + ---------- + streams_container : StreamsContainer, optional + data_sources : DataSources - available inputs: - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - available outputs: - - material_properties (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.result.material_property_of_element() - >>> # Instantiate operator - >>> op = dpf.operators.result.material_property_of_element() + >>> # Make input connections + >>> my_streams_container = dpf.StreamsContainer() + >>> 
op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) - >>> # Make input connections - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.material_property_of_element( + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.material_property_of_element(streams_container=my_streams_container,data_sources=my_data_sources) + >>> # Get output data + >>> result_material_properties = op.outputs.material_properties() + """ - >>> # Get output data - >>> result_material_properties = op.outputs.material_properties()""" - def __init__(self, streams_container=None, data_sources=None, config=None, server=None): - super().__init__(name="MaterialPropertyOfElement", config = config, server = server) + def __init__( + self, streams_container=None, data_sources=None, config=None, server=None + ): + super().__init__(name="MaterialPropertyOfElement", config=config, server=server) self._inputs = InputsMaterialPropertyOfElement(self) self._outputs = OutputsMaterialPropertyOfElement(self) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) @staticmethod def _spec(): - spec = Specification(description=""" Load the appropriate operator based on the data sources and get material properties""", - map_input_pin_spec={ - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document=""""""), - 4 : 
PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "material_properties", type_names=["field"], optional=False, document="""material properties""")}) + description = """ Load the appropriate operator based on the data sources and get + material properties""" + spec = Specification( + description=description, + map_input_pin_spec={ + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="material_properties", + type_names=["field"], + optional=False, + document="""Material properties""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "MaterialPropertyOfElement") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="MaterialPropertyOfElement", server=server) @property def inputs(self): @@ -68,117 +107,115 @@ def inputs(self): Returns -------- - inputs : InputsMaterialPropertyOfElement + inputs : InputsMaterialPropertyOfElement """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMaterialPropertyOfElement + outputs : OutputsMaterialPropertyOfElement """ return super().outputs -#internal name: MaterialPropertyOfElement -#scripting name: material_property_of_element class InputsMaterialPropertyOfElement(_Inputs): - """Intermediate class used to connect user inputs to material_property_of_element operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.material_property_of_element() - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + """Intermediate class used to connect user inputs to + material_property_of_element operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.material_property_of_element() + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) """ + def __init__(self, op: Operator): super().__init__(material_property_of_element._spec().inputs, op) - self._streams_container = Input(material_property_of_element._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + material_property_of_element._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(material_property_of_element._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + material_property_of_element._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.material_property_of_element() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.material_property_of_element() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources + class OutputsMaterialPropertyOfElement(_Outputs): - """Intermediate class used to get outputs from material_property_of_element operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.material_property_of_element() - >>> # Connect inputs : op.inputs. ... - >>> result_material_properties = op.outputs.material_properties() + """Intermediate class used to get outputs from + material_property_of_element operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.material_property_of_element() + >>> # Connect inputs : op.inputs. ... + >>> result_material_properties = op.outputs.material_properties() """ + def __init__(self, op: Operator): super().__init__(material_property_of_element._spec().outputs, op) - self._material_properties = Output(material_property_of_element._spec().output_pin(0), 0, op) + self._material_properties = Output( + material_property_of_element._spec().output_pin(0), 0, op + ) self._outputs.append(self._material_properties) @property def material_properties(self): """Allows to get material_properties output of the operator - - - pindoc: material properties - Returns ---------- - my_material_properties : Field, + my_material_properties : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.material_property_of_element() >>> # Connect inputs : op.inputs. ... 
- >>> result_material_properties = op.outputs.material_properties() - """ + >>> result_material_properties = op.outputs.material_properties() + """ # noqa: E501 return self._material_properties - diff --git a/ansys/dpf/core/operators/result/members_in_bending_not_certified.py b/ansys/dpf/core/operators/result/members_in_bending_not_certified.py new file mode 100644 index 00000000000..22759da71a5 --- /dev/null +++ b/ansys/dpf/core/operators/result/members_in_bending_not_certified.py @@ -0,0 +1,605 @@ +""" +members_in_bending_not_certified +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class members_in_bending_not_certified(Operator): + """This operator is a non-certified example of buckling resistance + verification for the bending members. It is only provided as an + example if you want to develop your own compute norm operator. The + results computed by this beta operator have not been certified by + ANSYS. ANSYS declines all responsibility for the use of this + operator. HATS Beam and irregular beams (unequal I-Beam, not- + square Channel-Beam, not-square Angle L-beam, unequal hollow + rectanglar beam) not supported. + + Parameters + ---------- + time_scoping : Scoping or int, optional + field_yield_strength : Field + This pin contains field of beam's yield + strength defined by the user. + class_cross_section : bool + Selection for a cross-section. true: class 1 + or 2 cross-sections. false: class 3 + cross section. if the user defines + the cross section as class 1 or 2, + the section modulus would be plastic + section modulus. 
if it's class 3- + cross section,the section modulus + would be elastic section modulus + streams : StreamsContainer, optional + result file container allowed to be kept + open to cache data. + data_sources : DataSources, optional + Result file path container, used if no + streams are set. + partial_factor : float + Partial safety factor for resistance of + members to instability assessed by + member checks. default value: 1. + mesh : MeshedRegion + mesh containing beam's properties defined by + user + bending_moment_y : FieldsContainer + Fields container of bending moment on axis y + defined by user + bending_moment_z : FieldsContainer + Fields container of bending moment on axis z + defined by user + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.members_in_bending_not_certified() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_field_yield_strength = dpf.Field() + >>> op.inputs.field_yield_strength.connect(my_field_yield_strength) + >>> my_class_cross_section = bool() + >>> op.inputs.class_cross_section.connect(my_class_cross_section) + >>> my_streams = dpf.StreamsContainer() + >>> op.inputs.streams.connect(my_streams) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_partial_factor = float() + >>> op.inputs.partial_factor.connect(my_partial_factor) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_bending_moment_y = dpf.FieldsContainer() + >>> op.inputs.bending_moment_y.connect(my_bending_moment_y) + >>> my_bending_moment_z = dpf.FieldsContainer() + >>> op.inputs.bending_moment_z.connect(my_bending_moment_z) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.members_in_bending_not_certified( + ... time_scoping=my_time_scoping, + ... 
field_yield_strength=my_field_yield_strength, + ... class_cross_section=my_class_cross_section, + ... streams=my_streams, + ... data_sources=my_data_sources, + ... partial_factor=my_partial_factor, + ... mesh=my_mesh, + ... bending_moment_y=my_bending_moment_y, + ... bending_moment_z=my_bending_moment_z, + ... ) + + >>> # Get output data + >>> result_buckling_resistance_bending_yy = op.outputs.buckling_resistance_bending_yy() + >>> result_buckling_resistance_bending_zz = op.outputs.buckling_resistance_bending_zz() + """ + + def __init__( + self, + time_scoping=None, + field_yield_strength=None, + class_cross_section=None, + streams=None, + data_sources=None, + partial_factor=None, + mesh=None, + bending_moment_y=None, + bending_moment_z=None, + config=None, + server=None, + ): + super().__init__( + name="members_in_bending_not_certified", config=config, server=server + ) + self._inputs = InputsMembersInBendingNotCertified(self) + self._outputs = OutputsMembersInBendingNotCertified(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if field_yield_strength is not None: + self.inputs.field_yield_strength.connect(field_yield_strength) + if class_cross_section is not None: + self.inputs.class_cross_section.connect(class_cross_section) + if streams is not None: + self.inputs.streams.connect(streams) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if partial_factor is not None: + self.inputs.partial_factor.connect(partial_factor) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if bending_moment_y is not None: + self.inputs.bending_moment_y.connect(bending_moment_y) + if bending_moment_z is not None: + self.inputs.bending_moment_z.connect(bending_moment_z) + + @staticmethod + def _spec(): + description = """This operator is a non-certified example of buckling resistance + verification for the bending members. 
It is only provided + as an example if you want to develop your own compute norm + operator. The results computed by this beta operator have + not been certified by ANSYS. ANSYS declines all + responsibility for the use of this operator. HATS Beam and + irregular beams (unequal I-Beam, not-square Channel-Beam, + not-square Angle L-beam, unequal hollow rectanglar beam) + not supported.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector", "int32"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="field_yield_strength", + type_names=["field"], + optional=False, + document="""This pin contains field of beam's yield + strength defined by the user.""", + ), + 2: PinSpecification( + name="class_cross_section", + type_names=["bool"], + optional=False, + document="""Selection for a cross-section. true: class 1 + or 2 cross-sections. false: class 3 + cross section. if the user defines + the cross section as class 1 or 2, + the section modulus would be plastic + section modulus. if it's class 3- + cross section,the section modulus + would be elastic section modulus""", + ), + 3: PinSpecification( + name="streams", + type_names=["streams_container"], + optional=True, + document=""" result file container allowed to be kept + open to cache data.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Result file path container, used if no + streams are set.""", + ), + 6: PinSpecification( + name="partial_factor", + type_names=["double"], + optional=False, + document="""Partial safety factor for resistance of + members to instability assessed by + member checks. 
default value: 1.""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document=""" mesh containing beam's properties defined by + user""", + ), + 8: PinSpecification( + name="bending_moment_y", + type_names=["fields_container"], + optional=False, + document="""Fields container of bending moment on axis y + defined by user""", + ), + 9: PinSpecification( + name="bending_moment_z", + type_names=["fields_container"], + optional=False, + document="""Fields container of bending moment on axis z + defined by user""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="buckling_resistance_bending_yy", + type_names=["fields_container"], + optional=False, + document="""Fields container of buckling resistance + factor on axis y-y in case of + bending(m). these factors should be + less than 1 and positive.""", + ), + 1: PinSpecification( + name="buckling_resistance_bending_zz", + type_names=["fields_container"], + optional=False, + document="""Fields container of buckling resistance + factor on axis z-z in case of + bending(m). these factors should be + less than 1 and positive.""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config( + name="members_in_bending_not_certified", server=server + ) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsMembersInBendingNotCertified + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsMembersInBendingNotCertified + """ + return super().outputs + + +class InputsMembersInBendingNotCertified(_Inputs): + """Intermediate class used to connect user inputs to + members_in_bending_not_certified operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_bending_not_certified() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_field_yield_strength = dpf.Field() + >>> op.inputs.field_yield_strength.connect(my_field_yield_strength) + >>> my_class_cross_section = bool() + >>> op.inputs.class_cross_section.connect(my_class_cross_section) + >>> my_streams = dpf.StreamsContainer() + >>> op.inputs.streams.connect(my_streams) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_partial_factor = float() + >>> op.inputs.partial_factor.connect(my_partial_factor) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_bending_moment_y = dpf.FieldsContainer() + >>> op.inputs.bending_moment_y.connect(my_bending_moment_y) + >>> my_bending_moment_z = dpf.FieldsContainer() + >>> op.inputs.bending_moment_z.connect(my_bending_moment_z) + """ + + def __init__(self, op: Operator): + super().__init__(members_in_bending_not_certified._spec().inputs, op) + self._time_scoping = Input( + members_in_bending_not_certified._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_scoping) + self._field_yield_strength = Input( + 
members_in_bending_not_certified._spec().input_pin(1), 1, op, -1 + ) + self._inputs.append(self._field_yield_strength) + self._class_cross_section = Input( + members_in_bending_not_certified._spec().input_pin(2), 2, op, -1 + ) + self._inputs.append(self._class_cross_section) + self._streams = Input( + members_in_bending_not_certified._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams) + self._data_sources = Input( + members_in_bending_not_certified._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._partial_factor = Input( + members_in_bending_not_certified._spec().input_pin(6), 6, op, -1 + ) + self._inputs.append(self._partial_factor) + self._mesh = Input( + members_in_bending_not_certified._spec().input_pin(7), 7, op, -1 + ) + self._inputs.append(self._mesh) + self._bending_moment_y = Input( + members_in_bending_not_certified._spec().input_pin(8), 8, op, -1 + ) + self._inputs.append(self._bending_moment_y) + self._bending_moment_z = Input( + members_in_bending_not_certified._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._bending_moment_z) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Parameters + ---------- + my_time_scoping : Scoping or int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_bending_not_certified() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def field_yield_strength(self): + """Allows to connect field_yield_strength input to the operator. + + This pin contains field of beam's yield + strength defined by the user. 
+ + Parameters + ---------- + my_field_yield_strength : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_bending_not_certified() + >>> op.inputs.field_yield_strength.connect(my_field_yield_strength) + >>> # or + >>> op.inputs.field_yield_strength(my_field_yield_strength) + """ + return self._field_yield_strength + + @property + def class_cross_section(self): + """Allows to connect class_cross_section input to the operator. + + Selection for a cross-section. true: class 1 + or 2 cross-sections. false: class 3 + cross section. if the user defines + the cross section as class 1 or 2, + the section modulus would be plastic + section modulus. if it's class 3- + cross section,the section modulus + would be elastic section modulus + + Parameters + ---------- + my_class_cross_section : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_bending_not_certified() + >>> op.inputs.class_cross_section.connect(my_class_cross_section) + >>> # or + >>> op.inputs.class_cross_section(my_class_cross_section) + """ + return self._class_cross_section + + @property + def streams(self): + """Allows to connect streams input to the operator. + + result file container allowed to be kept + open to cache data. + + Parameters + ---------- + my_streams : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_bending_not_certified() + >>> op.inputs.streams.connect(my_streams) + >>> # or + >>> op.inputs.streams(my_streams) + """ + return self._streams + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Result file path container, used if no + streams are set. 
+ + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_bending_not_certified() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def partial_factor(self): + """Allows to connect partial_factor input to the operator. + + Partial safety factor for resistance of + members to instability assessed by + member checks. default value: 1. + + Parameters + ---------- + my_partial_factor : float + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_bending_not_certified() + >>> op.inputs.partial_factor.connect(my_partial_factor) + >>> # or + >>> op.inputs.partial_factor(my_partial_factor) + """ + return self._partial_factor + + @property + def mesh(self): + """Allows to connect mesh input to the operator. + + mesh containing beam's properties defined by + user + + Parameters + ---------- + my_mesh : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_bending_not_certified() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + @property + def bending_moment_y(self): + """Allows to connect bending_moment_y input to the operator. + + Fields container of bending moment on axis y + defined by user + + Parameters + ---------- + my_bending_moment_y : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_bending_not_certified() + >>> op.inputs.bending_moment_y.connect(my_bending_moment_y) + >>> # or + >>> op.inputs.bending_moment_y(my_bending_moment_y) + """ + return self._bending_moment_y + + @property + def bending_moment_z(self): + """Allows to connect bending_moment_z input to the operator. 
+ + Fields container of bending moment on axis z + defined by user + + Parameters + ---------- + my_bending_moment_z : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_bending_not_certified() + >>> op.inputs.bending_moment_z.connect(my_bending_moment_z) + >>> # or + >>> op.inputs.bending_moment_z(my_bending_moment_z) + """ + return self._bending_moment_z + + +class OutputsMembersInBendingNotCertified(_Outputs): + """Intermediate class used to get outputs from + members_in_bending_not_certified operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_bending_not_certified() + >>> # Connect inputs : op.inputs. ... + >>> result_buckling_resistance_bending_yy = op.outputs.buckling_resistance_bending_yy() + >>> result_buckling_resistance_bending_zz = op.outputs.buckling_resistance_bending_zz() + """ + + def __init__(self, op: Operator): + super().__init__(members_in_bending_not_certified._spec().outputs, op) + self._buckling_resistance_bending_yy = Output( + members_in_bending_not_certified._spec().output_pin(0), 0, op + ) + self._outputs.append(self._buckling_resistance_bending_yy) + self._buckling_resistance_bending_zz = Output( + members_in_bending_not_certified._spec().output_pin(1), 1, op + ) + self._outputs.append(self._buckling_resistance_bending_zz) + + @property + def buckling_resistance_bending_yy(self): + """Allows to get buckling_resistance_bending_yy output of the operator + + Returns + ---------- + my_buckling_resistance_bending_yy : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_bending_not_certified() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_buckling_resistance_bending_yy = op.outputs.buckling_resistance_bending_yy() + """ # noqa: E501 + return self._buckling_resistance_bending_yy + + @property + def buckling_resistance_bending_zz(self): + """Allows to get buckling_resistance_bending_zz output of the operator + + Returns + ---------- + my_buckling_resistance_bending_zz : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_bending_not_certified() + >>> # Connect inputs : op.inputs. ... + >>> result_buckling_resistance_bending_zz = op.outputs.buckling_resistance_bending_zz() + """ # noqa: E501 + return self._buckling_resistance_bending_zz diff --git a/ansys/dpf/core/operators/result/members_in_compression_not_certified.py b/ansys/dpf/core/operators/result/members_in_compression_not_certified.py new file mode 100644 index 00000000000..19dc9aa8d7d --- /dev/null +++ b/ansys/dpf/core/operators/result/members_in_compression_not_certified.py @@ -0,0 +1,644 @@ +""" +members_in_compression_not_certified +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class members_in_compression_not_certified(Operator): + """This operator is a non-certified example of buckling resistance + verification for the compression members for Class I, 2 and 3 + cross-sections. It is only provided as an example if you want to + develop your own compute norm operator. The results computed by + this beta operator have not been certified by ANSYS. ANSYS + declines all responsibility for the use of this operator. 
+ + Parameters + ---------- + time_scoping : Scoping or int, optional + Time/freq set ids (use ints or scoping) + field_yield_strength : DataSources or Field + This pin contains file csv or field of beam's + yield strength. + field_end_condition : DataSources or Field + This pin contains file csv or field of beam's + end condition defined by the user. if + no input at this pin found, it would + take end condition's value of all + beams as 1. + streams : StreamsContainer, optional + result file container allowed to be kept + open to cache data. + data_sources : DataSources, optional + Result file path container, used if no + streams are set. + manufacture : bool + Manufacturing processus:hot finished if true + or cold formed if false. default + value : hot finished. + partial_factor : float + Partial safety factor for resistance of + members to instability assessed by + member checks. default value: 1. + mesh : MeshedRegion + mesh containing beam's properties defined by + user + axial_force : FieldsContainer + Fields container of axial force defined by + user + fabrication_type : bool + If there is beam i in the structure, please + define its fabrication type. 
true: + rolled section, false: welded section + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.members_in_compression_not_certified() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_field_yield_strength = dpf.DataSources() + >>> op.inputs.field_yield_strength.connect(my_field_yield_strength) + >>> my_field_end_condition = dpf.DataSources() + >>> op.inputs.field_end_condition.connect(my_field_end_condition) + >>> my_streams = dpf.StreamsContainer() + >>> op.inputs.streams.connect(my_streams) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_manufacture = bool() + >>> op.inputs.manufacture.connect(my_manufacture) + >>> my_partial_factor = float() + >>> op.inputs.partial_factor.connect(my_partial_factor) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_axial_force = dpf.FieldsContainer() + >>> op.inputs.axial_force.connect(my_axial_force) + >>> my_fabrication_type = bool() + >>> op.inputs.fabrication_type.connect(my_fabrication_type) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.members_in_compression_not_certified( + ... time_scoping=my_time_scoping, + ... field_yield_strength=my_field_yield_strength, + ... field_end_condition=my_field_end_condition, + ... streams=my_streams, + ... data_sources=my_data_sources, + ... manufacture=my_manufacture, + ... partial_factor=my_partial_factor, + ... mesh=my_mesh, + ... axial_force=my_axial_force, + ... fabrication_type=my_fabrication_type, + ... 
) + + >>> # Get output data + >>> result_buckling_resistance_compression_yy = op.outputs.buckling_resistance_compression_yy() + >>> result_buckling_resistance_compression_zz = op.outputs.buckling_resistance_compression_zz() + """ + + def __init__( + self, + time_scoping=None, + field_yield_strength=None, + field_end_condition=None, + streams=None, + data_sources=None, + manufacture=None, + partial_factor=None, + mesh=None, + axial_force=None, + fabrication_type=None, + config=None, + server=None, + ): + super().__init__( + name="members_in_compression_not_certified", config=config, server=server + ) + self._inputs = InputsMembersInCompressionNotCertified(self) + self._outputs = OutputsMembersInCompressionNotCertified(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if field_yield_strength is not None: + self.inputs.field_yield_strength.connect(field_yield_strength) + if field_end_condition is not None: + self.inputs.field_end_condition.connect(field_end_condition) + if streams is not None: + self.inputs.streams.connect(streams) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if manufacture is not None: + self.inputs.manufacture.connect(manufacture) + if partial_factor is not None: + self.inputs.partial_factor.connect(partial_factor) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if axial_force is not None: + self.inputs.axial_force.connect(axial_force) + if fabrication_type is not None: + self.inputs.fabrication_type.connect(fabrication_type) + + @staticmethod + def _spec(): + description = """This operator is a non-certified example of buckling resistance + verification for the compression members for Class I, 2 + and 3 cross-sections. It is only provided as an example if + you want to develop your own compute norm operator. The + results computed by this beta operator have not been + certified by ANSYS. 
ANSYS declines all responsibility for + the use of this operator.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector", "int32"], + optional=True, + document="""Time/freq set ids (use ints or scoping)""", + ), + 1: PinSpecification( + name="field_yield_strength", + type_names=["data_sources", "field"], + optional=False, + document="""This pin contains file csv or field of beam's + yield strength.""", + ), + 2: PinSpecification( + name="field_end_condition", + type_names=["data_sources", "field"], + optional=False, + document="""This pin contains file csv or field of beam's + end condition defined by the user. if + no input at this pin found, it would + take end condition's value of all + beams as 1.""", + ), + 3: PinSpecification( + name="streams", + type_names=["streams_container"], + optional=True, + document=""" result file container allowed to be kept + open to cache data.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Result file path container, used if no + streams are set.""", + ), + 5: PinSpecification( + name="manufacture", + type_names=["bool"], + optional=False, + document="""Manufacturing processus:hot finished if true + or cold formed if false. default + value : hot finished.""", + ), + 6: PinSpecification( + name="partial_factor", + type_names=["double"], + optional=False, + document="""Partial safety factor for resistance of + members to instability assessed by + member checks. 
default value: 1.""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document=""" mesh containing beam's properties defined by + user""", + ), + 8: PinSpecification( + name="axial_force", + type_names=["fields_container"], + optional=False, + document="""Fields container of axial force defined by + user""", + ), + 12: PinSpecification( + name="fabrication_type", + type_names=["bool"], + optional=False, + document="""If there is beam i in the structure, please + define its fabrication type. true: + rolled section, false: welded section""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="buckling_resistance_compression_yy", + type_names=["fields_container"], + optional=False, + document="""Fields container of buckling resistance + factor on axis y-y in case of + compression. these factors should be + less than 1 and positive.""", + ), + 1: PinSpecification( + name="buckling_resistance_compression_zz", + type_names=["fields_container"], + optional=False, + document="""Fields container of buckling resistance + factor on axis z-z in case of + compression. these factors should be + less than 1 and positive.""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config( + name="members_in_compression_not_certified", server=server + ) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsMembersInCompressionNotCertified + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsMembersInCompressionNotCertified + """ + return super().outputs + + +class InputsMembersInCompressionNotCertified(_Inputs): + """Intermediate class used to connect user inputs to + members_in_compression_not_certified operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_compression_not_certified() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_field_yield_strength = dpf.DataSources() + >>> op.inputs.field_yield_strength.connect(my_field_yield_strength) + >>> my_field_end_condition = dpf.DataSources() + >>> op.inputs.field_end_condition.connect(my_field_end_condition) + >>> my_streams = dpf.StreamsContainer() + >>> op.inputs.streams.connect(my_streams) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_manufacture = bool() + >>> op.inputs.manufacture.connect(my_manufacture) + >>> my_partial_factor = float() + >>> op.inputs.partial_factor.connect(my_partial_factor) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_axial_force = dpf.FieldsContainer() + >>> op.inputs.axial_force.connect(my_axial_force) + >>> my_fabrication_type = bool() + >>> op.inputs.fabrication_type.connect(my_fabrication_type) + """ + + def __init__(self, op: Operator): + super().__init__(members_in_compression_not_certified._spec().inputs, op) + self._time_scoping = Input( + members_in_compression_not_certified._spec().input_pin(0), 0, op, -1 + ) + 
self._inputs.append(self._time_scoping) + self._field_yield_strength = Input( + members_in_compression_not_certified._spec().input_pin(1), 1, op, -1 + ) + self._inputs.append(self._field_yield_strength) + self._field_end_condition = Input( + members_in_compression_not_certified._spec().input_pin(2), 2, op, -1 + ) + self._inputs.append(self._field_end_condition) + self._streams = Input( + members_in_compression_not_certified._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams) + self._data_sources = Input( + members_in_compression_not_certified._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources) + self._manufacture = Input( + members_in_compression_not_certified._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._manufacture) + self._partial_factor = Input( + members_in_compression_not_certified._spec().input_pin(6), 6, op, -1 + ) + self._inputs.append(self._partial_factor) + self._mesh = Input( + members_in_compression_not_certified._spec().input_pin(7), 7, op, -1 + ) + self._inputs.append(self._mesh) + self._axial_force = Input( + members_in_compression_not_certified._spec().input_pin(8), 8, op, -1 + ) + self._inputs.append(self._axial_force) + self._fabrication_type = Input( + members_in_compression_not_certified._spec().input_pin(12), 12, op, -1 + ) + self._inputs.append(self._fabrication_type) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq set ids (use ints or scoping) + + Parameters + ---------- + my_time_scoping : Scoping or int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_compression_not_certified() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def field_yield_strength(self): + """Allows to connect field_yield_strength input to the operator. 
+ + This pin contains file csv or field of beam's + yield strength. + + Parameters + ---------- + my_field_yield_strength : DataSources or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_compression_not_certified() + >>> op.inputs.field_yield_strength.connect(my_field_yield_strength) + >>> # or + >>> op.inputs.field_yield_strength(my_field_yield_strength) + """ + return self._field_yield_strength + + @property + def field_end_condition(self): + """Allows to connect field_end_condition input to the operator. + + This pin contains file csv or field of beam's + end condition defined by the user. if + no input at this pin found, it would + take end condition's value of all + beams as 1. + + Parameters + ---------- + my_field_end_condition : DataSources or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_compression_not_certified() + >>> op.inputs.field_end_condition.connect(my_field_end_condition) + >>> # or + >>> op.inputs.field_end_condition(my_field_end_condition) + """ + return self._field_end_condition + + @property + def streams(self): + """Allows to connect streams input to the operator. + + result file container allowed to be kept + open to cache data. + + Parameters + ---------- + my_streams : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_compression_not_certified() + >>> op.inputs.streams.connect(my_streams) + >>> # or + >>> op.inputs.streams(my_streams) + """ + return self._streams + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Result file path container, used if no + streams are set. 
+ + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_compression_not_certified() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def manufacture(self): + """Allows to connect manufacture input to the operator. + + Manufacturing processus:hot finished if true + or cold formed if false. default + value : hot finished. + + Parameters + ---------- + my_manufacture : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_compression_not_certified() + >>> op.inputs.manufacture.connect(my_manufacture) + >>> # or + >>> op.inputs.manufacture(my_manufacture) + """ + return self._manufacture + + @property + def partial_factor(self): + """Allows to connect partial_factor input to the operator. + + Partial safety factor for resistance of + members to instability assessed by + member checks. default value: 1. + + Parameters + ---------- + my_partial_factor : float + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_compression_not_certified() + >>> op.inputs.partial_factor.connect(my_partial_factor) + >>> # or + >>> op.inputs.partial_factor(my_partial_factor) + """ + return self._partial_factor + + @property + def mesh(self): + """Allows to connect mesh input to the operator. + + mesh containing beam's properties defined by + user + + Parameters + ---------- + my_mesh : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_compression_not_certified() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + @property + def axial_force(self): + """Allows to connect axial_force input to the operator. 
+ + Fields container of axial force defined by + user + + Parameters + ---------- + my_axial_force : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_compression_not_certified() + >>> op.inputs.axial_force.connect(my_axial_force) + >>> # or + >>> op.inputs.axial_force(my_axial_force) + """ + return self._axial_force + + @property + def fabrication_type(self): + """Allows to connect fabrication_type input to the operator. + + If there is beam i in the structure, please + define its fabrication type. true: + rolled section, false: welded section + + Parameters + ---------- + my_fabrication_type : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_compression_not_certified() + >>> op.inputs.fabrication_type.connect(my_fabrication_type) + >>> # or + >>> op.inputs.fabrication_type(my_fabrication_type) + """ + return self._fabrication_type + + +class OutputsMembersInCompressionNotCertified(_Outputs): + """Intermediate class used to get outputs from + members_in_compression_not_certified operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_compression_not_certified() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_buckling_resistance_compression_yy = op.outputs.buckling_resistance_compression_yy() + >>> result_buckling_resistance_compression_zz = op.outputs.buckling_resistance_compression_zz() + """ + + def __init__(self, op: Operator): + super().__init__(members_in_compression_not_certified._spec().outputs, op) + self._buckling_resistance_compression_yy = Output( + members_in_compression_not_certified._spec().output_pin(0), 0, op + ) + self._outputs.append(self._buckling_resistance_compression_yy) + self._buckling_resistance_compression_zz = Output( + members_in_compression_not_certified._spec().output_pin(1), 1, op + ) + self._outputs.append(self._buckling_resistance_compression_zz) + + @property + def buckling_resistance_compression_yy(self): + """Allows to get buckling_resistance_compression_yy output of the operator + + Returns + ---------- + my_buckling_resistance_compression_yy : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_compression_not_certified() + >>> # Connect inputs : op.inputs. ... + >>> result_buckling_resistance_compression_yy = op.outputs.buckling_resistance_compression_yy() + """ # noqa: E501 + return self._buckling_resistance_compression_yy + + @property + def buckling_resistance_compression_zz(self): + """Allows to get buckling_resistance_compression_zz output of the operator + + Returns + ---------- + my_buckling_resistance_compression_zz : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_compression_not_certified() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_buckling_resistance_compression_zz = op.outputs.buckling_resistance_compression_zz() + """ # noqa: E501 + return self._buckling_resistance_compression_zz diff --git a/ansys/dpf/core/operators/result/members_in_linear_compression_bending_not_certified.py b/ansys/dpf/core/operators/result/members_in_linear_compression_bending_not_certified.py new file mode 100644 index 00000000000..d9f077ba80e --- /dev/null +++ b/ansys/dpf/core/operators/result/members_in_linear_compression_bending_not_certified.py @@ -0,0 +1,813 @@ +""" +members_in_linear_compression_bending_not_certified +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class members_in_linear_compression_bending_not_certified(Operator): + """This operator is a non-certified example of buckling resistance + verification for the compression and bending members for Class I, + 2 and 3 cross-sections. It is only provided as an example if you + want to develop your own compute norm operator. This norm is + linear summation of the utilization ratios of compression members + and bending members. The results computed by this beta operator + have not been certified by ANSYS. ANSYS declines all + responsibility for the use of this operator. + + Parameters + ---------- + time_scoping : Scoping or int, optional + field_yield_strength : Field + This pin contains field of beam's yield + strength defined by the user. + field_end_condition : DataSources or Field, optional + This pin contains file csv or field of beam's + end condition defined by the user. if + no input at this pin found, it would + take end conditions value of all + beams as 1 + streams : StreamsContainer, optional + result file container allowed to be kept + open to cache data. 
+ data_sources : DataSources, optional + Result file path container, used if no + streams are set. + manufacture : bool, optional + Manufacturing processus:hot finished if true + or cold formed if false. default + value : hot finished. + partial_factor : float, optional + Partial factor for resistance of members to + instability assessed by member + checks. default value: 1.0 + mesh : MeshedRegion + mesh containing beam's properties defined by + user + bending_moment_y : FieldsContainer + Fields container of bending moment on axis y + defined by user + bending_moment_z : FieldsContainer + Fields container of bending moment on axis z + defined by user + axial_force : FieldsContainer + Fields container of axial force defined by + user + class_cross_section : bool + Selection for a cross-section. true: class 1 + or 2 cross-sections. false: class 3 + cross section. if the user defines + the cross section as class 1 or 2, + the section modulus would be plastic + section modulus. if it's class 3- + cross section,the section modulus + would be elastic section modulus + fabrication_type : bool, optional + Selection of fabrication's type if there are + beams i in the structure. true: + rolled section, false: welded + section. default: rolled section. 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.members_in_linear_compression_bending_not_certified() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_field_yield_strength = dpf.Field() + >>> op.inputs.field_yield_strength.connect(my_field_yield_strength) + >>> my_field_end_condition = dpf.DataSources() + >>> op.inputs.field_end_condition.connect(my_field_end_condition) + >>> my_streams = dpf.StreamsContainer() + >>> op.inputs.streams.connect(my_streams) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_manufacture = bool() + >>> op.inputs.manufacture.connect(my_manufacture) + >>> my_partial_factor = float() + >>> op.inputs.partial_factor.connect(my_partial_factor) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_bending_moment_y = dpf.FieldsContainer() + >>> op.inputs.bending_moment_y.connect(my_bending_moment_y) + >>> my_bending_moment_z = dpf.FieldsContainer() + >>> op.inputs.bending_moment_z.connect(my_bending_moment_z) + >>> my_axial_force = dpf.FieldsContainer() + >>> op.inputs.axial_force.connect(my_axial_force) + >>> my_class_cross_section = bool() + >>> op.inputs.class_cross_section.connect(my_class_cross_section) + >>> my_fabrication_type = bool() + >>> op.inputs.fabrication_type.connect(my_fabrication_type) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.members_in_linear_compression_bending_not_certified( + ... time_scoping=my_time_scoping, + ... field_yield_strength=my_field_yield_strength, + ... field_end_condition=my_field_end_condition, + ... streams=my_streams, + ... data_sources=my_data_sources, + ... manufacture=my_manufacture, + ... partial_factor=my_partial_factor, + ... mesh=my_mesh, + ... bending_moment_y=my_bending_moment_y, + ... 
bending_moment_z=my_bending_moment_z, + ... axial_force=my_axial_force, + ... class_cross_section=my_class_cross_section, + ... fabrication_type=my_fabrication_type, + ... ) + + >>> # Get output data + >>> result_buckling_resistance_linear_summation_utilization_ratios = op.outputs.buckling_resistance_linear_summation_utilization_ratios() + """ + + def __init__( + self, + time_scoping=None, + field_yield_strength=None, + field_end_condition=None, + streams=None, + data_sources=None, + manufacture=None, + partial_factor=None, + mesh=None, + bending_moment_y=None, + bending_moment_z=None, + axial_force=None, + class_cross_section=None, + fabrication_type=None, + config=None, + server=None, + ): + super().__init__( + name="members_in_linear_compression_bending_not_certified", + config=config, + server=server, + ) + self._inputs = InputsMembersInLinearCompressionBendingNotCertified(self) + self._outputs = OutputsMembersInLinearCompressionBendingNotCertified(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if field_yield_strength is not None: + self.inputs.field_yield_strength.connect(field_yield_strength) + if field_end_condition is not None: + self.inputs.field_end_condition.connect(field_end_condition) + if streams is not None: + self.inputs.streams.connect(streams) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if manufacture is not None: + self.inputs.manufacture.connect(manufacture) + if partial_factor is not None: + self.inputs.partial_factor.connect(partial_factor) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if bending_moment_y is not None: + self.inputs.bending_moment_y.connect(bending_moment_y) + if bending_moment_z is not None: + self.inputs.bending_moment_z.connect(bending_moment_z) + if axial_force is not None: + self.inputs.axial_force.connect(axial_force) + if class_cross_section is not None: + self.inputs.class_cross_section.connect(class_cross_section) + if 
fabrication_type is not None: + self.inputs.fabrication_type.connect(fabrication_type) + + @staticmethod + def _spec(): + description = """This operator is a non-certified example of buckling resistance + verification for the compression and bending members for + Class I, 2 and 3 cross-sections. It is only provided as an + example if you want to develop your own compute norm + operator. This norm is linear summation of the utilization + ratios of compression members and bending members. The + results computed by this beta operator have not been + certified by ANSYS. ANSYS declines all responsibility for + the use of this operator.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector", "int32"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="field_yield_strength", + type_names=["field"], + optional=False, + document="""This pin contains field of beam's yield + strength defined by the user.""", + ), + 2: PinSpecification( + name="field_end_condition", + type_names=["data_sources", "field"], + optional=True, + document="""This pin contains file csv or field of beam's + end condition defined by the user. if + no input at this pin found, it would + take end conditions value of all + beams as 1""", + ), + 3: PinSpecification( + name="streams", + type_names=["streams_container"], + optional=True, + document=""" result file container allowed to be kept + open to cache data.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""Result file path container, used if no + streams are set.""", + ), + 5: PinSpecification( + name="manufacture", + type_names=["bool"], + optional=True, + document="""Manufacturing processus:hot finished if true + or cold formed if false. 
default + value : hot finished.""", + ), + 6: PinSpecification( + name="partial_factor", + type_names=["double"], + optional=True, + document="""Partial factor for resistance of members to + instability assessed by member + checks. default value: 1.0""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document=""" mesh containing beam's properties defined by + user""", + ), + 8: PinSpecification( + name="bending_moment_y", + type_names=["fields_container"], + optional=False, + document="""Fields container of bending moment on axis y + defined by user""", + ), + 9: PinSpecification( + name="bending_moment_z", + type_names=["fields_container"], + optional=False, + document="""Fields container of bending moment on axis z + defined by user""", + ), + 10: PinSpecification( + name="axial_force", + type_names=["fields_container"], + optional=False, + document="""Fields container of axial force defined by + user""", + ), + 11: PinSpecification( + name="class_cross_section", + type_names=["bool"], + optional=False, + document="""Selection for a cross-section. true: class 1 + or 2 cross-sections. false: class 3 + cross section. if the user defines + the cross section as class 1 or 2, + the section modulus would be plastic + section modulus. if it's class 3- + cross section,the section modulus + would be elastic section modulus""", + ), + 12: PinSpecification( + name="fabrication_type", + type_names=["bool"], + optional=True, + document="""Selection of fabrication's type if there are + beams i in the structure. true: + rolled section, false: welded + section. default: rolled section.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="buckling_resistance_linear_summation_utilization_ratios", + type_names=["fields_container"], + optional=False, + document="""Linear summation of the utilization ratios in + all members submitted under a + combination of both bending and + compression. 
these factors should be + less than 1 and positive.""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config( + name="members_in_linear_compression_bending_not_certified", server=server + ) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsMembersInLinearCompressionBendingNotCertified + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsMembersInLinearCompressionBendingNotCertified + """ + return super().outputs + + +class InputsMembersInLinearCompressionBendingNotCertified(_Inputs): + """Intermediate class used to connect user inputs to + members_in_linear_compression_bending_not_certified operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_linear_compression_bending_not_certified() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_field_yield_strength = dpf.Field() + >>> op.inputs.field_yield_strength.connect(my_field_yield_strength) + >>> my_field_end_condition = dpf.DataSources() + >>> op.inputs.field_end_condition.connect(my_field_end_condition) + >>> my_streams = dpf.StreamsContainer() + >>> op.inputs.streams.connect(my_streams) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_manufacture = bool() + >>> op.inputs.manufacture.connect(my_manufacture) + >>> my_partial_factor = float() + >>> op.inputs.partial_factor.connect(my_partial_factor) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_bending_moment_y = dpf.FieldsContainer() + >>> op.inputs.bending_moment_y.connect(my_bending_moment_y) + >>> my_bending_moment_z = dpf.FieldsContainer() + >>> op.inputs.bending_moment_z.connect(my_bending_moment_z) + >>> my_axial_force = dpf.FieldsContainer() + >>> op.inputs.axial_force.connect(my_axial_force) + >>> my_class_cross_section = bool() + >>> op.inputs.class_cross_section.connect(my_class_cross_section) + >>> my_fabrication_type = bool() + >>> op.inputs.fabrication_type.connect(my_fabrication_type) + """ + + def __init__(self, op: Operator): + super().__init__( + members_in_linear_compression_bending_not_certified._spec().inputs, op + ) + self._time_scoping = Input( + members_in_linear_compression_bending_not_certified._spec().input_pin(0), + 0, + op, + -1, + ) + self._inputs.append(self._time_scoping) + self._field_yield_strength = Input( + members_in_linear_compression_bending_not_certified._spec().input_pin(1), + 1, + op, + -1, + ) + self._inputs.append(self._field_yield_strength) + self._field_end_condition = Input( + 
members_in_linear_compression_bending_not_certified._spec().input_pin(2), + 2, + op, + -1, + ) + self._inputs.append(self._field_end_condition) + self._streams = Input( + members_in_linear_compression_bending_not_certified._spec().input_pin(3), + 3, + op, + -1, + ) + self._inputs.append(self._streams) + self._data_sources = Input( + members_in_linear_compression_bending_not_certified._spec().input_pin(4), + 4, + op, + -1, + ) + self._inputs.append(self._data_sources) + self._manufacture = Input( + members_in_linear_compression_bending_not_certified._spec().input_pin(5), + 5, + op, + -1, + ) + self._inputs.append(self._manufacture) + self._partial_factor = Input( + members_in_linear_compression_bending_not_certified._spec().input_pin(6), + 6, + op, + -1, + ) + self._inputs.append(self._partial_factor) + self._mesh = Input( + members_in_linear_compression_bending_not_certified._spec().input_pin(7), + 7, + op, + -1, + ) + self._inputs.append(self._mesh) + self._bending_moment_y = Input( + members_in_linear_compression_bending_not_certified._spec().input_pin(8), + 8, + op, + -1, + ) + self._inputs.append(self._bending_moment_y) + self._bending_moment_z = Input( + members_in_linear_compression_bending_not_certified._spec().input_pin(9), + 9, + op, + -1, + ) + self._inputs.append(self._bending_moment_z) + self._axial_force = Input( + members_in_linear_compression_bending_not_certified._spec().input_pin(10), + 10, + op, + -1, + ) + self._inputs.append(self._axial_force) + self._class_cross_section = Input( + members_in_linear_compression_bending_not_certified._spec().input_pin(11), + 11, + op, + -1, + ) + self._inputs.append(self._class_cross_section) + self._fabrication_type = Input( + members_in_linear_compression_bending_not_certified._spec().input_pin(12), + 12, + op, + -1, + ) + self._inputs.append(self._fabrication_type) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. 
+ + Parameters + ---------- + my_time_scoping : Scoping or int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_linear_compression_bending_not_certified() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def field_yield_strength(self): + """Allows to connect field_yield_strength input to the operator. + + This pin contains field of beam's yield + strength defined by the user. + + Parameters + ---------- + my_field_yield_strength : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_linear_compression_bending_not_certified() + >>> op.inputs.field_yield_strength.connect(my_field_yield_strength) + >>> # or + >>> op.inputs.field_yield_strength(my_field_yield_strength) + """ + return self._field_yield_strength + + @property + def field_end_condition(self): + """Allows to connect field_end_condition input to the operator. + + This pin contains file csv or field of beam's + end condition defined by the user. if + no input at this pin found, it would + take end conditions value of all + beams as 1 + + Parameters + ---------- + my_field_end_condition : DataSources or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_linear_compression_bending_not_certified() + >>> op.inputs.field_end_condition.connect(my_field_end_condition) + >>> # or + >>> op.inputs.field_end_condition(my_field_end_condition) + """ + return self._field_end_condition + + @property + def streams(self): + """Allows to connect streams input to the operator. + + result file container allowed to be kept + open to cache data. 
+ + Parameters + ---------- + my_streams : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_linear_compression_bending_not_certified() + >>> op.inputs.streams.connect(my_streams) + >>> # or + >>> op.inputs.streams(my_streams) + """ + return self._streams + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Result file path container, used if no + streams are set. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_linear_compression_bending_not_certified() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def manufacture(self): + """Allows to connect manufacture input to the operator. + + Manufacturing processus:hot finished if true + or cold formed if false. default + value : hot finished. + + Parameters + ---------- + my_manufacture : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_linear_compression_bending_not_certified() + >>> op.inputs.manufacture.connect(my_manufacture) + >>> # or + >>> op.inputs.manufacture(my_manufacture) + """ + return self._manufacture + + @property + def partial_factor(self): + """Allows to connect partial_factor input to the operator. + + Partial factor for resistance of members to + instability assessed by member + checks. 
default value: 1.0 + + Parameters + ---------- + my_partial_factor : float + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_linear_compression_bending_not_certified() + >>> op.inputs.partial_factor.connect(my_partial_factor) + >>> # or + >>> op.inputs.partial_factor(my_partial_factor) + """ + return self._partial_factor + + @property + def mesh(self): + """Allows to connect mesh input to the operator. + + mesh containing beam's properties defined by + user + + Parameters + ---------- + my_mesh : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_linear_compression_bending_not_certified() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + @property + def bending_moment_y(self): + """Allows to connect bending_moment_y input to the operator. + + Fields container of bending moment on axis y + defined by user + + Parameters + ---------- + my_bending_moment_y : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_linear_compression_bending_not_certified() + >>> op.inputs.bending_moment_y.connect(my_bending_moment_y) + >>> # or + >>> op.inputs.bending_moment_y(my_bending_moment_y) + """ + return self._bending_moment_y + + @property + def bending_moment_z(self): + """Allows to connect bending_moment_z input to the operator. 
+ + Fields container of bending moment on axis z + defined by user + + Parameters + ---------- + my_bending_moment_z : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_linear_compression_bending_not_certified() + >>> op.inputs.bending_moment_z.connect(my_bending_moment_z) + >>> # or + >>> op.inputs.bending_moment_z(my_bending_moment_z) + """ + return self._bending_moment_z + + @property + def axial_force(self): + """Allows to connect axial_force input to the operator. + + Fields container of axial force defined by + user + + Parameters + ---------- + my_axial_force : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_linear_compression_bending_not_certified() + >>> op.inputs.axial_force.connect(my_axial_force) + >>> # or + >>> op.inputs.axial_force(my_axial_force) + """ + return self._axial_force + + @property + def class_cross_section(self): + """Allows to connect class_cross_section input to the operator. + + Selection for a cross-section. true: class 1 + or 2 cross-sections. false: class 3 + cross section. if the user defines + the cross section as class 1 or 2, + the section modulus would be plastic + section modulus. if it's class 3- + cross section,the section modulus + would be elastic section modulus + + Parameters + ---------- + my_class_cross_section : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_linear_compression_bending_not_certified() + >>> op.inputs.class_cross_section.connect(my_class_cross_section) + >>> # or + >>> op.inputs.class_cross_section(my_class_cross_section) + """ + return self._class_cross_section + + @property + def fabrication_type(self): + """Allows to connect fabrication_type input to the operator. + + Selection of fabrication's type if there are + beams i in the structure. true: + rolled section, false: welded + section. 
default: rolled section. + + Parameters + ---------- + my_fabrication_type : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_linear_compression_bending_not_certified() + >>> op.inputs.fabrication_type.connect(my_fabrication_type) + >>> # or + >>> op.inputs.fabrication_type(my_fabrication_type) + """ + return self._fabrication_type + + +class OutputsMembersInLinearCompressionBendingNotCertified(_Outputs): + """Intermediate class used to get outputs from + members_in_linear_compression_bending_not_certified operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_linear_compression_bending_not_certified() + >>> # Connect inputs : op.inputs. ... + >>> result_buckling_resistance_linear_summation_utilization_ratios = op.outputs.buckling_resistance_linear_summation_utilization_ratios() + """ + + def __init__(self, op: Operator): + super().__init__( + members_in_linear_compression_bending_not_certified._spec().outputs, op + ) + self._buckling_resistance_linear_summation_utilization_ratios = Output( + members_in_linear_compression_bending_not_certified._spec().output_pin(0), + 0, + op, + ) + self._outputs.append( + self._buckling_resistance_linear_summation_utilization_ratios + ) + + @property + def buckling_resistance_linear_summation_utilization_ratios(self): + """Allows to get buckling_resistance_linear_summation_utilization_ratios output of the operator + + Returns + ---------- + my_buckling_resistance_linear_summation_utilization_ratios : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.members_in_linear_compression_bending_not_certified() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_buckling_resistance_linear_summation_utilization_ratios = op.outputs.buckling_resistance_linear_summation_utilization_ratios() + """ # noqa: E501 + return self._buckling_resistance_linear_summation_utilization_ratios diff --git a/ansys/dpf/core/operators/result/migrate_to_h5dpf.py b/ansys/dpf/core/operators/result/migrate_to_h5dpf.py index 876b31d60ff..18934398e25 100644 --- a/ansys/dpf/core/operators/result/migrate_to_h5dpf.py +++ b/ansys/dpf/core/operators/result/migrate_to_h5dpf.py @@ -1,84 +1,163 @@ """ migrate_to_h5dpf -================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Hdf5 plugin, from "result" category -""" class migrate_to_h5dpf(Operator): - """Read mesh properties from the results files contained in the streams or data sources and make those properties available through a mesh selection manager in output. 
- - available inputs: - - filename (str) - - comma_separated_list_of_results (str) (optional) - - all_time_sets (bool) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) (optional) - - available outputs: - - migrated_file (DataSources) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.migrate_to_h5dpf() - - >>> # Make input connections - >>> my_filename = str() - >>> op.inputs.filename.connect(my_filename) - >>> my_comma_separated_list_of_results = str() - >>> op.inputs.comma_separated_list_of_results.connect(my_comma_separated_list_of_results) - >>> my_all_time_sets = bool() - >>> op.inputs.all_time_sets.connect(my_all_time_sets) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.migrate_to_h5dpf(filename=my_filename,comma_separated_list_of_results=my_comma_separated_list_of_results,all_time_sets=my_all_time_sets,streams_container=my_streams_container,data_sources=my_data_sources) - - >>> # Get output data - >>> result_migrated_file = op.outputs.migrated_file()""" - def __init__(self, filename=None, comma_separated_list_of_results=None, all_time_sets=None, streams_container=None, data_sources=None, config=None, server=None): - super().__init__(name="hdf5::h5dpf::migrate_file", config = config, server = server) - self._inputs = InputsMigrateToH5dpf(self) - self._outputs = OutputsMigrateToH5dpf(self) - if filename !=None: + """Read mesh properties from the results files contained in the streams + or data sources and make those properties available through a mesh + selection manager in output. 
+ + Parameters + ---------- + filename : str + Filename of the migrated file + comma_separated_list_of_results : str, optional + List of result (source operator names) that + will be stored. if empty, all + available results will be converted. + all_time_sets : bool, optional + Default is true + streams_container : StreamsContainer, optional + Streams (result file container) (optional) + data_sources : DataSources, optional + If the stream is null then we need to get the + file path from the data sources + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.migrate_to_h5dpf() + + >>> # Make input connections + >>> my_filename = str() + >>> op.inputs.filename.connect(my_filename) + >>> my_comma_separated_list_of_results = str() + >>> op.inputs.comma_separated_list_of_results.connect(my_comma_separated_list_of_results) + >>> my_all_time_sets = bool() + >>> op.inputs.all_time_sets.connect(my_all_time_sets) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.migrate_to_h5dpf( + ... filename=my_filename, + ... comma_separated_list_of_results=my_comma_separated_list_of_results, + ... all_time_sets=my_all_time_sets, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... 
) + + >>> # Get output data + >>> result_migrated_file = op.outputs.migrated_file() + """ + + def __init__( + self, + filename=None, + comma_separated_list_of_results=None, + all_time_sets=None, + streams_container=None, + data_sources=None, + config=None, + server=None, + ): + super().__init__(name="hdf5::h5dpf::migrate_file", config=config, server=server) + self._inputs = InputsMigrateToH5Dpf(self) + self._outputs = OutputsMigrateToH5Dpf(self) + if filename is not None: self.inputs.filename.connect(filename) - if comma_separated_list_of_results !=None: - self.inputs.comma_separated_list_of_results.connect(comma_separated_list_of_results) - if all_time_sets !=None: + if comma_separated_list_of_results is not None: + self.inputs.comma_separated_list_of_results.connect( + comma_separated_list_of_results + ) + if all_time_sets is not None: self.inputs.all_time_sets.connect(all_time_sets) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) @staticmethod def _spec(): - spec = Specification(description="""Read mesh properties from the results files contained in the streams or data sources and make those properties available through a mesh selection manager in output.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "filename", type_names=["string"], optional=False, document="""filename of the migrated file"""), - 1 : PinSpecification(name = "comma_separated_list_of_results", type_names=["string"], optional=True, document="""list of result (source operator names) that will be stored. 
If empty, all available results will be converted."""), - 2 : PinSpecification(name = "all_time_sets", type_names=["bool"], optional=True, document="""default is true"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""streams (result file container) (optional)"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=True, document="""if the stream is null then we need to get the file path from the data sources""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "migrated_file", type_names=["data_sources"], optional=False, document="""""")}) + description = """Read mesh properties from the results files contained in the streams + or data sources and make those properties available + through a mesh selection manager in output.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="filename", + type_names=["string"], + optional=False, + document="""Filename of the migrated file""", + ), + 1: PinSpecification( + name="comma_separated_list_of_results", + type_names=["string"], + optional=True, + document="""List of result (source operator names) that + will be stored. 
if empty, all + available results will be converted.""", + ), + 2: PinSpecification( + name="all_time_sets", + type_names=["bool"], + optional=True, + document="""Default is true""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Streams (result file container) (optional)""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""If the stream is null then we need to get the + file path from the data sources""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="migrated_file", + type_names=["data_sources"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "hdf5::h5dpf::migrate_file") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="hdf5::h5dpf::migrate_file", server=server) @property def inputs(self): @@ -86,197 +165,192 @@ def inputs(self): Returns -------- - inputs : InputsMigrateToH5dpf + inputs : InputsMigrateToH5Dpf """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMigrateToH5dpf + outputs : OutputsMigrateToH5Dpf """ return super().outputs -#internal name: hdf5::h5dpf::migrate_file -#scripting name: migrate_to_h5dpf -class InputsMigrateToH5dpf(_Inputs): - """Intermediate class used to connect user inputs to migrate_to_h5dpf operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.migrate_to_h5dpf() - >>> my_filename = str() - >>> op.inputs.filename.connect(my_filename) - >>> my_comma_separated_list_of_results = str() - >>> op.inputs.comma_separated_list_of_results.connect(my_comma_separated_list_of_results) - >>> my_all_time_sets = bool() - >>> op.inputs.all_time_sets.connect(my_all_time_sets) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) +class InputsMigrateToH5Dpf(_Inputs): + """Intermediate class used to connect user inputs to + migrate_to_h5dpf operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.migrate_to_h5dpf() + >>> my_filename = str() + >>> op.inputs.filename.connect(my_filename) + >>> my_comma_separated_list_of_results = str() + >>> op.inputs.comma_separated_list_of_results.connect(my_comma_separated_list_of_results) + >>> my_all_time_sets = bool() + >>> op.inputs.all_time_sets.connect(my_all_time_sets) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) """ + def __init__(self, op: Operator): super().__init__(migrate_to_h5dpf._spec().inputs, op) - self._filename = Input(migrate_to_h5dpf._spec().input_pin(0), 0, op, -1) + self._filename = Input(migrate_to_h5dpf._spec().input_pin(0), 0, op, -1) self._inputs.append(self._filename) - self._comma_separated_list_of_results = Input(migrate_to_h5dpf._spec().input_pin(1), 1, op, -1) + self._comma_separated_list_of_results = Input( + migrate_to_h5dpf._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._comma_separated_list_of_results) - self._all_time_sets = Input(migrate_to_h5dpf._spec().input_pin(2), 2, op, -1) + self._all_time_sets = Input(migrate_to_h5dpf._spec().input_pin(2), 2, op, -1) self._inputs.append(self._all_time_sets) - self._streams_container = Input(migrate_to_h5dpf._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + migrate_to_h5dpf._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(migrate_to_h5dpf._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(migrate_to_h5dpf._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) @property def filename(self): - """Allows to connect filename input to the operator + """Allows to connect filename input to the operator. 
- - pindoc: filename of the migrated file + Filename of the migrated file Parameters ---------- - my_filename : str, + my_filename : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.migrate_to_h5dpf() >>> op.inputs.filename.connect(my_filename) - >>> #or + >>> # or >>> op.inputs.filename(my_filename) - """ return self._filename @property def comma_separated_list_of_results(self): - """Allows to connect comma_separated_list_of_results input to the operator + """Allows to connect comma_separated_list_of_results input to the operator. - - pindoc: list of result (source operator names) that will be stored. If empty, all available results will be converted. + List of result (source operator names) that + will be stored. if empty, all + available results will be converted. Parameters ---------- - my_comma_separated_list_of_results : str, + my_comma_separated_list_of_results : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.migrate_to_h5dpf() >>> op.inputs.comma_separated_list_of_results.connect(my_comma_separated_list_of_results) - >>> #or + >>> # or >>> op.inputs.comma_separated_list_of_results(my_comma_separated_list_of_results) - """ return self._comma_separated_list_of_results @property def all_time_sets(self): - """Allows to connect all_time_sets input to the operator + """Allows to connect all_time_sets input to the operator. - - pindoc: default is true + Default is true Parameters ---------- - my_all_time_sets : bool, + my_all_time_sets : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.migrate_to_h5dpf() >>> op.inputs.all_time_sets.connect(my_all_time_sets) - >>> #or + >>> # or >>> op.inputs.all_time_sets(my_all_time_sets) - """ return self._all_time_sets @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: streams (result file container) (optional) + Streams (result file container) (optional) Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.migrate_to_h5dpf() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: if the stream is null then we need to get the file path from the data sources + If the stream is null then we need to get the + file path from the data sources Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.migrate_to_h5dpf() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources -class OutputsMigrateToH5dpf(_Outputs): - """Intermediate class used to get outputs from migrate_to_h5dpf operator - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.migrate_to_h5dpf() - >>> # Connect inputs : op.inputs. ... - >>> result_migrated_file = op.outputs.migrated_file() +class OutputsMigrateToH5Dpf(_Outputs): + """Intermediate class used to get outputs from + migrate_to_h5dpf operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.migrate_to_h5dpf() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_migrated_file = op.outputs.migrated_file() """ + def __init__(self, op: Operator): super().__init__(migrate_to_h5dpf._spec().outputs, op) - self._migrated_file = Output(migrate_to_h5dpf._spec().output_pin(0), 0, op) + self._migrated_file = Output(migrate_to_h5dpf._spec().output_pin(0), 0, op) self._outputs.append(self._migrated_file) @property def migrated_file(self): """Allows to get migrated_file output of the operator - Returns ---------- - my_migrated_file : DataSources, + my_migrated_file : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.migrate_to_h5dpf() >>> # Connect inputs : op.inputs. ... - >>> result_migrated_file = op.outputs.migrated_file() - """ + >>> result_migrated_file = op.outputs.migrated_file() + """ # noqa: E501 return self._migrated_file - diff --git a/ansys/dpf/core/operators/result/modal_basis.py b/ansys/dpf/core/operators/result/modal_basis.py index 327da10984c..f034f1682d7 100644 --- a/ansys/dpf/core/operators/result/modal_basis.py +++ b/ansys/dpf/core/operators/result/modal_basis.py @@ -1,92 +1,238 @@ """ modal_basis -=========== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class modal_basis(Operator): - """Read/compute modal basis by calling the readers defined by the datasources. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.modal_basis() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.modal_basis(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="ModalBasis", config = config, server = server) + """Read/compute modal basis by calling the readers defined by the + datasources. 
+ + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.modal_basis() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> 
op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.modal_basis( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="ModalBasis", config=config, server=server) self._inputs = InputsModalBasis(self) self._outputs = OutputsModalBasis(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute modal basis by calling the readers defined by the 
datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute modal basis by calling the readers 
defined by the + datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages 
are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ModalBasis") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ModalBasis", server=server) @property def inputs(self): @@ -94,275 +240,275 @@ def inputs(self): Returns -------- - inputs : InputsModalBasis + inputs : InputsModalBasis """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsModalBasis + outputs : OutputsModalBasis """ return super().outputs -#internal name: ModalBasis -#scripting name: modal_basis class InputsModalBasis(_Inputs): - """Intermediate class used to connect user inputs to modal_basis operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.modal_basis() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> 
op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + modal_basis operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.modal_basis() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(modal_basis._spec().inputs, op) - self._time_scoping = Input(modal_basis._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(modal_basis._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(modal_basis._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(modal_basis._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(modal_basis._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(modal_basis._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = 
Input(modal_basis._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(modal_basis._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(modal_basis._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(modal_basis._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(modal_basis._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input(modal_basis._spec().input_pin(5), 5, op, -1) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(modal_basis._spec().input_pin(7), 7, op, -1) + self._mesh = Input(modal_basis._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(modal_basis._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(modal_basis._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.modal_basis() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. 
- - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.modal_basis() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.modal_basis() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.modal_basis() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.modal_basis() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.modal_basis() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
- - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.modal_basis() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.modal_basis() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsModalBasis(_Outputs): - """Intermediate class used to get outputs from modal_basis operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.modal_basis() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + modal_basis operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.modal_basis() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(modal_basis._spec().outputs, op) - self._fields_container = Output(modal_basis._spec().output_pin(0), 0, op) + self._fields_container = Output(modal_basis._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.modal_basis() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/nmisc.py b/ansys/dpf/core/operators/result/nmisc.py index d3ad62a8b48..3f80518dc21 100644 --- a/ansys/dpf/core/operators/result/nmisc.py +++ b/ansys/dpf/core/operators/result/nmisc.py @@ -1,90 +1,192 @@ """ nmisc -===== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class nmisc(Operator): """Read NMISC results from the rst file. 
- available inputs: - - time_scoping (Scoping, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping, list) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - mesh (MeshedRegion) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.nmisc() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.nmisc(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,fields_container=my_fields_container,streams_container=my_streams_container,data_sources=my_data_sources,mesh=my_mesh) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, fields_container=None, streams_container=None, data_sources=None, mesh=None, config=None, server=None): - super().__init__(name="mapdl::nmisc", config = config, server = server) + Parameters + ---------- + time_scoping : Scoping, optional + mesh_scoping : ScopingsContainer or Scoping, optional + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer or Stream, 
optional + Streams containing the result file. + data_sources : DataSources + Data sources containing the result file. + mesh : MeshedRegion, optional + item_index : int + Index of requested item. + num_components : int, optional + Number of components for the requested item. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.nmisc() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_item_index = int() + >>> op.inputs.item_index.connect(my_item_index) + >>> my_num_components = int() + >>> op.inputs.num_components.connect(my_num_components) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.nmisc( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... mesh=my_mesh, + ... item_index=my_item_index, + ... num_components=my_num_components, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + mesh=None, + item_index=None, + num_components=None, + config=None, + server=None, + ): + super().__init__(name="mapdl::nmisc", config=config, server=server) self._inputs = InputsNmisc(self) self._outputs = OutputsNmisc(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) + if item_index is not None: + self.inputs.item_index.connect(item_index) + if num_components is not None: + self.inputs.num_components.connect(num_components) @staticmethod def _spec(): - spec = Specification(description="""Read NMISC results from the rst file.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","vector"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping","vector"], optional=True, document=""""""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], optional=True, document="""streams containing the result file."""), - 4 : PinSpecification(name = 
"data_sources", type_names=["data_sources"], optional=False, document="""data sources containing the result file."""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""FieldsContainer filled in""")}) + description = """Read NMISC results from the rst file.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping", "vector"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container", "stream"], + optional=True, + document="""Streams containing the result file.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Data sources containing the result file.""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + 10: PinSpecification( + name="item_index", + type_names=["int32"], + optional=False, + document="""Index of requested item.""", + ), + 11: PinSpecification( + name="num_components", + type_names=["int32"], + optional=True, + document="""Number of components for the requested item.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Fieldscontainer filled in""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return 
Operator.default_config(name = "mapdl::nmisc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the global server. + """ + return Operator.default_config(name="mapdl::nmisc", server=server) @property def inputs(self): @@ -92,219 +194,252 @@ def inputs(self): Returns -------- - inputs : InputsNmisc + inputs : InputsNmisc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNmisc + outputs : OutputsNmisc """ return super().outputs -#internal name: mapdl::nmisc -#scripting name: nmisc class InputsNmisc(_Inputs): - """Intermediate class used to connect user inputs to nmisc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nmisc() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + nmisc operator.
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nmisc() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_item_index = int() + >>> op.inputs.item_index.connect(my_item_index) + >>> my_num_components = int() + >>> op.inputs.num_components.connect(my_num_components) """ + def __init__(self, op: Operator): super().__init__(nmisc._spec().inputs, op) - self._time_scoping = Input(nmisc._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(nmisc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(nmisc._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(nmisc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(nmisc._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(nmisc._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(nmisc._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(nmisc._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(nmisc._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(nmisc._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._mesh = Input(nmisc._spec().input_pin(7), 7, op, -1) + self._mesh = Input(nmisc._spec().input_pin(7), 7, op, -1) 
self._inputs.append(self._mesh) + self._item_index = Input(nmisc._spec().input_pin(10), 10, op, -1) + self._inputs.append(self._item_index) + self._num_components = Input(nmisc._spec().input_pin(11), 11, op, -1) + self._inputs.append(self._num_components) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. Parameters ---------- - my_time_scoping : Scoping, list, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nmisc() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, list, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nmisc() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nmisc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: streams containing the result file. + Streams containing the result file. Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nmisc() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: data sources containing the result file. + Data sources containing the result file. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nmisc() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nmisc() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + @property + def item_index(self): + """Allows to connect item_index input to the operator. + + Index of requested item. + + Parameters + ---------- + my_item_index : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nmisc() + >>> op.inputs.item_index.connect(my_item_index) + >>> # or + >>> op.inputs.item_index(my_item_index) + """ + return self._item_index + + @property + def num_components(self): + """Allows to connect num_components input to the operator. + + Number of components for the requested item. + + Parameters + ---------- + my_num_components : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nmisc() + >>> op.inputs.num_components.connect(my_num_components) + >>> # or + >>> op.inputs.num_components(my_num_components) + """ + return self._num_components + + class OutputsNmisc(_Outputs): - """Intermediate class used to get outputs from nmisc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nmisc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + nmisc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nmisc() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(nmisc._spec().outputs, op) - self._fields_container = Output(nmisc._spec().output_pin(0), 0, op) + self._fields_container = Output(nmisc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: FieldsContainer filled in - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nmisc() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/nodal_averaged_creep_strains.py b/ansys/dpf/core/operators/result/nodal_averaged_creep_strains.py index 079fd46cd63..0040c1e444d 100644 --- a/ansys/dpf/core/operators/result/nodal_averaged_creep_strains.py +++ b/ansys/dpf/core/operators/result/nodal_averaged_creep_strains.py @@ -1,90 +1,166 @@ """ nodal_averaged_creep_strains -============================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class nodal_averaged_creep_strains(Operator): - """Read nodal averaged creep strains as averaged nodal result from rst file. 
- - available inputs: - - time_scoping (Scoping, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping, list) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - mesh (MeshedRegion) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.nodal_averaged_creep_strains() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.nodal_averaged_creep_strains(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,fields_container=my_fields_container,streams_container=my_streams_container,data_sources=my_data_sources,mesh=my_mesh) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, fields_container=None, streams_container=None, data_sources=None, mesh=None, config=None, server=None): - super().__init__(name="mapdl::rst::NCR", config = config, server = server) + """Read nodal averaged creep strains as averaged nodal result from rst + file. 
+ + Parameters + ---------- + time_scoping : Scoping, optional + mesh_scoping : ScopingsContainer or Scoping, optional + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer or Stream, optional + Streams containing the result file. + data_sources : DataSources + Data sources containing the result file. + mesh : MeshedRegion, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.nodal_averaged_creep_strains() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.nodal_averaged_creep_strains( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... mesh=my_mesh, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + mesh=None, + config=None, + server=None, + ): + super().__init__(name="mapdl::rst::NCR", config=config, server=server) self._inputs = InputsNodalAveragedCreepStrains(self) self._outputs = OutputsNodalAveragedCreepStrains(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Read nodal averaged creep strains as averaged nodal result from rst file.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","vector"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping","vector"], optional=True, document=""""""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], optional=True, document="""Streams containing the result file."""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""data sources containing the result file."""), - 7 : 
PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""FieldsContainer filled in""")}) + description = """Read nodal averaged creep strains as averaged nodal result from rst + file.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping", "vector"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container", "stream"], + optional=True, + document="""Streams containing the result file.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Data sources containing the result file.""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Fieldscontainer filled in""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mapdl::rst::NCR") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the global server. + """ + return Operator.default_config(name="mapdl::rst::NCR", server=server) @property def inputs(self): @@ -92,219 +168,216 @@ def inputs(self): Returns -------- - inputs : InputsNodalAveragedCreepStrains + inputs : InputsNodalAveragedCreepStrains """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNodalAveragedCreepStrains + outputs : OutputsNodalAveragedCreepStrains """ return super().outputs -#internal name: mapdl::rst::NCR -#scripting name: nodal_averaged_creep_strains class InputsNodalAveragedCreepStrains(_Inputs): - """Intermediate class used to connect user inputs to nodal_averaged_creep_strains operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_averaged_creep_strains() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + nodal_averaged_creep_strains operator.
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_averaged_creep_strains() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(nodal_averaged_creep_strains._spec().inputs, op) - self._time_scoping = Input(nodal_averaged_creep_strains._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + nodal_averaged_creep_strains._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(nodal_averaged_creep_strains._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + nodal_averaged_creep_strains._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(nodal_averaged_creep_strains._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + nodal_averaged_creep_strains._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(nodal_averaged_creep_strains._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + nodal_averaged_creep_strains._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(nodal_averaged_creep_strains._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + nodal_averaged_creep_strains._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - 
self._mesh = Input(nodal_averaged_creep_strains._spec().input_pin(7), 7, op, -1) + self._mesh = Input(nodal_averaged_creep_strains._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. Parameters ---------- - my_time_scoping : Scoping, list, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_creep_strains() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, list, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_creep_strains() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_creep_strains() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: Streams containing the result file. + Streams containing the result file. Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_creep_strains() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: data sources containing the result file. + Data sources containing the result file. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_creep_strains() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_creep_strains() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsNodalAveragedCreepStrains(_Outputs): - """Intermediate class used to get outputs from nodal_averaged_creep_strains operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_averaged_creep_strains() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + nodal_averaged_creep_strains operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_averaged_creep_strains() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(nodal_averaged_creep_strains._spec().outputs, op) - self._fields_container = Output(nodal_averaged_creep_strains._spec().output_pin(0), 0, op) + self._fields_container = Output( + nodal_averaged_creep_strains._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: FieldsContainer filled in - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_creep_strains() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/nodal_averaged_elastic_strains.py b/ansys/dpf/core/operators/result/nodal_averaged_elastic_strains.py index a0e6bcfeb66..91aabd8ecec 100644 --- a/ansys/dpf/core/operators/result/nodal_averaged_elastic_strains.py +++ b/ansys/dpf/core/operators/result/nodal_averaged_elastic_strains.py @@ -1,90 +1,166 @@ """ nodal_averaged_elastic_strains -============================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class nodal_averaged_elastic_strains(Operator): - """Read nodal averaged elastic strains as averaged nodal result from rst file. 
- - available inputs: - - time_scoping (Scoping, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping, list) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - mesh (MeshedRegion) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.nodal_averaged_elastic_strains() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.nodal_averaged_elastic_strains(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,fields_container=my_fields_container,streams_container=my_streams_container,data_sources=my_data_sources,mesh=my_mesh) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, fields_container=None, streams_container=None, data_sources=None, mesh=None, config=None, server=None): - super().__init__(name="mapdl::rst::NPEL", config = config, server = server) + """Read nodal averaged elastic strains as averaged nodal result from rst + file. 
+ + Parameters + ---------- + time_scoping : Scoping, optional + mesh_scoping : ScopingsContainer or Scoping, optional + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer or Stream, optional + Streams containing the result file. + data_sources : DataSources + Data sources containing the result file. + mesh : MeshedRegion, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.nodal_averaged_elastic_strains() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.nodal_averaged_elastic_strains( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... mesh=my_mesh, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + mesh=None, + config=None, + server=None, + ): + super().__init__(name="mapdl::rst::NPEL", config=config, server=server) self._inputs = InputsNodalAveragedElasticStrains(self) self._outputs = OutputsNodalAveragedElasticStrains(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Read nodal averaged elastic strains as averaged nodal result from rst file.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","vector"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping","vector"], optional=True, document=""""""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], optional=True, document="""Streams containing the result file."""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""data sources containing the result file."""), - 7 : 
PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""FieldsContainer filled in""")}) + description = """Read nodal averaged elastic strains as averaged nodal result from rst + file.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping", "vector"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container", "stream"], + optional=True, + document="""Streams containing the result file.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Data sources containing the result file.""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Fieldscontainer filled in""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mapdl::rst::NPEL") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="mapdl::rst::NPEL", server=server) @property def inputs(self): @@ -92,219 +168,218 @@ def inputs(self): Returns -------- - inputs : InputsNodalAveragedElasticStrains + inputs : InputsNodalAveragedElasticStrains """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNodalAveragedElasticStrains + outputs : OutputsNodalAveragedElasticStrains """ return super().outputs -#internal name: mapdl::rst::NPEL -#scripting name: nodal_averaged_elastic_strains class InputsNodalAveragedElasticStrains(_Inputs): - """Intermediate class used to connect user inputs to nodal_averaged_elastic_strains operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_averaged_elastic_strains() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + nodal_averaged_elastic_strains operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_averaged_elastic_strains() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(nodal_averaged_elastic_strains._spec().inputs, op) - self._time_scoping = Input(nodal_averaged_elastic_strains._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + nodal_averaged_elastic_strains._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(nodal_averaged_elastic_strains._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + nodal_averaged_elastic_strains._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(nodal_averaged_elastic_strains._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + nodal_averaged_elastic_strains._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(nodal_averaged_elastic_strains._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + nodal_averaged_elastic_strains._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(nodal_averaged_elastic_strains._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + nodal_averaged_elastic_strains._spec().input_pin(4), 4, op, -1 + ) 
self._inputs.append(self._data_sources) - self._mesh = Input(nodal_averaged_elastic_strains._spec().input_pin(7), 7, op, -1) + self._mesh = Input( + nodal_averaged_elastic_strains._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. Parameters ---------- - my_time_scoping : Scoping, list, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_elastic_strains() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, list, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_elastic_strains() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_elastic_strains() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: Streams containing the result file. + Streams containing the result file. Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_elastic_strains() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: data sources containing the result file. + Data sources containing the result file. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_elastic_strains() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_elastic_strains() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsNodalAveragedElasticStrains(_Outputs): - """Intermediate class used to get outputs from nodal_averaged_elastic_strains operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_averaged_elastic_strains() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + nodal_averaged_elastic_strains operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_averaged_elastic_strains() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(nodal_averaged_elastic_strains._spec().outputs, op) - self._fields_container = Output(nodal_averaged_elastic_strains._spec().output_pin(0), 0, op) + self._fields_container = Output( + nodal_averaged_elastic_strains._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: FieldsContainer filled in - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_elastic_strains() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/nodal_averaged_equivalent_creep_strain.py b/ansys/dpf/core/operators/result/nodal_averaged_equivalent_creep_strain.py index a3a956d2f6b..6bdd61a0186 100644 --- a/ansys/dpf/core/operators/result/nodal_averaged_equivalent_creep_strain.py +++ b/ansys/dpf/core/operators/result/nodal_averaged_equivalent_creep_strain.py @@ -1,90 +1,166 @@ """ nodal_averaged_equivalent_creep_strain -====================================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class nodal_averaged_equivalent_creep_strain(Operator): - """Read nodal averaged equivalent creep strain as averaged nodal result from rst file. 
- - available inputs: - - time_scoping (Scoping, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping, list) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - mesh (MeshedRegion) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,fields_container=my_fields_container,streams_container=my_streams_container,data_sources=my_data_sources,mesh=my_mesh) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, fields_container=None, streams_container=None, data_sources=None, mesh=None, config=None, server=None): - super().__init__(name="mapdl::rst::NCR_EQV", config = config, server = server) + """Read nodal averaged equivalent creep strain as averaged nodal result + from rst file. 
+ + Parameters + ---------- + time_scoping : Scoping, optional + mesh_scoping : ScopingsContainer or Scoping, optional + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer or Stream, optional + Streams containing the result file. + data_sources : DataSources + Data sources containing the result file. + mesh : MeshedRegion, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... mesh=my_mesh, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + mesh=None, + config=None, + server=None, + ): + super().__init__(name="mapdl::rst::NCR_EQV", config=config, server=server) self._inputs = InputsNodalAveragedEquivalentCreepStrain(self) self._outputs = OutputsNodalAveragedEquivalentCreepStrain(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Read nodal averaged equivalent creep strain as averaged nodal result from rst file.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","vector"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping","vector"], optional=True, document=""""""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], optional=True, document="""Streams containing the result file."""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""data sources containing the 
result file."""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""FieldsContainer filled in""")}) + description = """Read nodal averaged equivalent creep strain as averaged nodal result + from rst file.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping", "vector"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container", "stream"], + optional=True, + document="""Streams containing the result file.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Data sources containing the result file.""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Fieldscontainer filled in""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mapdl::rst::NCR_EQV") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="mapdl::rst::NCR_EQV", server=server) @property def inputs(self): @@ -92,219 +168,218 @@ def inputs(self): Returns -------- - inputs : InputsNodalAveragedEquivalentCreepStrain + inputs : InputsNodalAveragedEquivalentCreepStrain """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNodalAveragedEquivalentCreepStrain + outputs : OutputsNodalAveragedEquivalentCreepStrain """ return super().outputs -#internal name: mapdl::rst::NCR_EQV -#scripting name: nodal_averaged_equivalent_creep_strain class InputsNodalAveragedEquivalentCreepStrain(_Inputs): - """Intermediate class used to connect user inputs to nodal_averaged_equivalent_creep_strain operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + nodal_averaged_equivalent_creep_strain operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(nodal_averaged_equivalent_creep_strain._spec().inputs, op) - self._time_scoping = Input(nodal_averaged_equivalent_creep_strain._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + nodal_averaged_equivalent_creep_strain._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(nodal_averaged_equivalent_creep_strain._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + nodal_averaged_equivalent_creep_strain._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(nodal_averaged_equivalent_creep_strain._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + nodal_averaged_equivalent_creep_strain._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(nodal_averaged_equivalent_creep_strain._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + nodal_averaged_equivalent_creep_strain._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(nodal_averaged_equivalent_creep_strain._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + 
nodal_averaged_equivalent_creep_strain._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(nodal_averaged_equivalent_creep_strain._spec().input_pin(7), 7, op, -1) + self._mesh = Input( + nodal_averaged_equivalent_creep_strain._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. Parameters ---------- - my_time_scoping : Scoping, list, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, list, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: Streams containing the result file. + Streams containing the result file. Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: data sources containing the result file. + Data sources containing the result file. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsNodalAveragedEquivalentCreepStrain(_Outputs): - """Intermediate class used to get outputs from nodal_averaged_equivalent_creep_strain operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + nodal_averaged_equivalent_creep_strain operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(nodal_averaged_equivalent_creep_strain._spec().outputs, op) - self._fields_container = Output(nodal_averaged_equivalent_creep_strain._spec().output_pin(0), 0, op) + self._fields_container = Output( + nodal_averaged_equivalent_creep_strain._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: FieldsContainer filled in - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/nodal_averaged_equivalent_elastic_strain.py b/ansys/dpf/core/operators/result/nodal_averaged_equivalent_elastic_strain.py index 04d145bd759..0574655c986 100644 --- a/ansys/dpf/core/operators/result/nodal_averaged_equivalent_elastic_strain.py +++ b/ansys/dpf/core/operators/result/nodal_averaged_equivalent_elastic_strain.py @@ -1,90 +1,166 @@ """ nodal_averaged_equivalent_elastic_strain -======================================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class nodal_averaged_equivalent_elastic_strain(Operator): - """Read nodal averaged equivalent elastic strain as averaged nodal result from rst file. 
- - available inputs: - - time_scoping (Scoping, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping, list) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - mesh (MeshedRegion) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,fields_container=my_fields_container,streams_container=my_streams_container,data_sources=my_data_sources,mesh=my_mesh) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, fields_container=None, streams_container=None, data_sources=None, mesh=None, config=None, server=None): - super().__init__(name="mapdl::rst::NPEL_EQV", config = config, server = server) + """Read nodal averaged equivalent elastic strain as averaged nodal result + from rst file. 
+ + Parameters + ---------- + time_scoping : Scoping, optional + mesh_scoping : ScopingsContainer or Scoping, optional + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer or Stream, optional + Streams containing the result file. + data_sources : DataSources + Data sources containing the result file. + mesh : MeshedRegion, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... mesh=my_mesh, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + mesh=None, + config=None, + server=None, + ): + super().__init__(name="mapdl::rst::NPEL_EQV", config=config, server=server) self._inputs = InputsNodalAveragedEquivalentElasticStrain(self) self._outputs = OutputsNodalAveragedEquivalentElasticStrain(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Read nodal averaged equivalent elastic strain as averaged nodal result from rst file.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","vector"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping","vector"], optional=True, document=""""""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], optional=True, document="""Streams containing the result file."""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""data sources containing 
the result file."""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""FieldsContainer filled in""")}) + description = """Read nodal averaged equivalent elastic strain as averaged nodal result + from rst file.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping", "vector"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container", "stream"], + optional=True, + document="""Streams containing the result file.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Data sources containing the result file.""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Fieldscontainer filled in""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mapdl::rst::NPEL_EQV") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="mapdl::rst::NPEL_EQV", server=server) @property def inputs(self): @@ -92,219 +168,218 @@ def inputs(self): Returns -------- - inputs : InputsNodalAveragedEquivalentElasticStrain + inputs : InputsNodalAveragedEquivalentElasticStrain """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNodalAveragedEquivalentElasticStrain + outputs : OutputsNodalAveragedEquivalentElasticStrain """ return super().outputs -#internal name: mapdl::rst::NPEL_EQV -#scripting name: nodal_averaged_equivalent_elastic_strain class InputsNodalAveragedEquivalentElasticStrain(_Inputs): - """Intermediate class used to connect user inputs to nodal_averaged_equivalent_elastic_strain operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + nodal_averaged_equivalent_elastic_strain operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(nodal_averaged_equivalent_elastic_strain._spec().inputs, op) - self._time_scoping = Input(nodal_averaged_equivalent_elastic_strain._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + nodal_averaged_equivalent_elastic_strain._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(nodal_averaged_equivalent_elastic_strain._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + nodal_averaged_equivalent_elastic_strain._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(nodal_averaged_equivalent_elastic_strain._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + nodal_averaged_equivalent_elastic_strain._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(nodal_averaged_equivalent_elastic_strain._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + nodal_averaged_equivalent_elastic_strain._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(nodal_averaged_equivalent_elastic_strain._spec().input_pin(4), 4, op, -1) + 
self._data_sources = Input( + nodal_averaged_equivalent_elastic_strain._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(nodal_averaged_equivalent_elastic_strain._spec().input_pin(7), 7, op, -1) + self._mesh = Input( + nodal_averaged_equivalent_elastic_strain._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. Parameters ---------- - my_time_scoping : Scoping, list, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, list, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: Streams containing the result file. + Streams containing the result file. Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: data sources containing the result file. + Data sources containing the result file. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsNodalAveragedEquivalentElasticStrain(_Outputs): - """Intermediate class used to get outputs from nodal_averaged_equivalent_elastic_strain operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + nodal_averaged_equivalent_elastic_strain operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(nodal_averaged_equivalent_elastic_strain._spec().outputs, op) - self._fields_container = Output(nodal_averaged_equivalent_elastic_strain._spec().output_pin(0), 0, op) + self._fields_container = Output( + nodal_averaged_equivalent_elastic_strain._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: FieldsContainer filled in - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/nodal_averaged_equivalent_plastic_strain.py b/ansys/dpf/core/operators/result/nodal_averaged_equivalent_plastic_strain.py index 0335d970187..7c80123b90c 100644 --- a/ansys/dpf/core/operators/result/nodal_averaged_equivalent_plastic_strain.py +++ b/ansys/dpf/core/operators/result/nodal_averaged_equivalent_plastic_strain.py @@ -1,90 +1,166 @@ """ nodal_averaged_equivalent_plastic_strain -======================================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class nodal_averaged_equivalent_plastic_strain(Operator): - """Read nodal averaged equivalent plastic strain as averaged nodal result from rst file. 
- - available inputs: - - time_scoping (Scoping, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping, list) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - mesh (MeshedRegion) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,fields_container=my_fields_container,streams_container=my_streams_container,data_sources=my_data_sources,mesh=my_mesh) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, fields_container=None, streams_container=None, data_sources=None, mesh=None, config=None, server=None): - super().__init__(name="mapdl::rst::NPPL_EQV", config = config, server = server) + """Read nodal averaged equivalent plastic strain as averaged nodal result + from rst file. 
+ + Parameters + ---------- + time_scoping : Scoping, optional + mesh_scoping : ScopingsContainer or Scoping, optional + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer or Stream, optional + Streams containing the result file. + data_sources : DataSources + Data sources containing the result file. + mesh : MeshedRegion, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... mesh=my_mesh, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + mesh=None, + config=None, + server=None, + ): + super().__init__(name="mapdl::rst::NPPL_EQV", config=config, server=server) self._inputs = InputsNodalAveragedEquivalentPlasticStrain(self) self._outputs = OutputsNodalAveragedEquivalentPlasticStrain(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Read nodal averaged equivalent plastic strain as averaged nodal result from rst file.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","vector"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping","vector"], optional=True, document=""""""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], optional=True, document="""Streams containing the result file."""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""data sources containing 
the result file."""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""FieldsContainer filled in""")}) + description = """Read nodal averaged equivalent plastic strain as averaged nodal result + from rst file.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping", "vector"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container", "stream"], + optional=True, + document="""Streams containing the result file.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Data sources containing the result file.""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Fieldscontainer filled in""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mapdl::rst::NPPL_EQV") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="mapdl::rst::NPPL_EQV", server=server) @property def inputs(self): @@ -92,219 +168,218 @@ def inputs(self): Returns -------- - inputs : InputsNodalAveragedEquivalentPlasticStrain + inputs : InputsNodalAveragedEquivalentPlasticStrain """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNodalAveragedEquivalentPlasticStrain + outputs : OutputsNodalAveragedEquivalentPlasticStrain """ return super().outputs -#internal name: mapdl::rst::NPPL_EQV -#scripting name: nodal_averaged_equivalent_plastic_strain class InputsNodalAveragedEquivalentPlasticStrain(_Inputs): - """Intermediate class used to connect user inputs to nodal_averaged_equivalent_plastic_strain operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + nodal_averaged_equivalent_plastic_strain operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(nodal_averaged_equivalent_plastic_strain._spec().inputs, op) - self._time_scoping = Input(nodal_averaged_equivalent_plastic_strain._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + nodal_averaged_equivalent_plastic_strain._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(nodal_averaged_equivalent_plastic_strain._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + nodal_averaged_equivalent_plastic_strain._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(nodal_averaged_equivalent_plastic_strain._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + nodal_averaged_equivalent_plastic_strain._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(nodal_averaged_equivalent_plastic_strain._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + nodal_averaged_equivalent_plastic_strain._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(nodal_averaged_equivalent_plastic_strain._spec().input_pin(4), 4, op, -1) + 
self._data_sources = Input( + nodal_averaged_equivalent_plastic_strain._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(nodal_averaged_equivalent_plastic_strain._spec().input_pin(7), 7, op, -1) + self._mesh = Input( + nodal_averaged_equivalent_plastic_strain._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. Parameters ---------- - my_time_scoping : Scoping, list, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, list, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: Streams containing the result file. + Streams containing the result file. Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: data sources containing the result file. + Data sources containing the result file. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsNodalAveragedEquivalentPlasticStrain(_Outputs): - """Intermediate class used to get outputs from nodal_averaged_equivalent_plastic_strain operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + nodal_averaged_equivalent_plastic_strain operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(nodal_averaged_equivalent_plastic_strain._spec().outputs, op) - self._fields_container = Output(nodal_averaged_equivalent_plastic_strain._spec().output_pin(0), 0, op) + self._fields_container = Output( + nodal_averaged_equivalent_plastic_strain._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: FieldsContainer filled in - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/nodal_averaged_equivalent_thermal_strains.py b/ansys/dpf/core/operators/result/nodal_averaged_equivalent_thermal_strains.py index 4585eac9c87..ff82277e0a2 100644 --- a/ansys/dpf/core/operators/result/nodal_averaged_equivalent_thermal_strains.py +++ b/ansys/dpf/core/operators/result/nodal_averaged_equivalent_thermal_strains.py @@ -1,90 +1,166 @@ """ nodal_averaged_equivalent_thermal_strains -========================================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class nodal_averaged_equivalent_thermal_strains(Operator): - """Read nodal averaged equivalent thermal strains as averaged nodal result from rst file. 
- - available inputs: - - time_scoping (Scoping, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping, list) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - mesh (MeshedRegion) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,fields_container=my_fields_container,streams_container=my_streams_container,data_sources=my_data_sources,mesh=my_mesh) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, fields_container=None, streams_container=None, data_sources=None, mesh=None, config=None, server=None): - super().__init__(name="mapdl::rst::NTH_EQV", config = config, server = server) + """Read nodal averaged equivalent thermal strains as averaged nodal + result from rst file. 
+ + Parameters + ---------- + time_scoping : Scoping, optional + mesh_scoping : ScopingsContainer or Scoping, optional + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer or Stream, optional + Streams containing the result file. + data_sources : DataSources + Data sources containing the result file. + mesh : MeshedRegion, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... mesh=my_mesh, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + mesh=None, + config=None, + server=None, + ): + super().__init__(name="mapdl::rst::NTH_EQV", config=config, server=server) self._inputs = InputsNodalAveragedEquivalentThermalStrains(self) self._outputs = OutputsNodalAveragedEquivalentThermalStrains(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Read nodal averaged equivalent thermal strains as averaged nodal result from rst file.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","vector"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping","vector"], optional=True, document=""""""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], optional=True, document="""Streams containing the result file."""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""data sources containing 
the result file."""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""FieldsContainer filled in""")}) + description = """Read nodal averaged equivalent thermal strains as averaged nodal + result from rst file.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping", "vector"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container", "stream"], + optional=True, + document="""Streams containing the result file.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Data sources containing the result file.""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Fieldscontainer filled in""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mapdl::rst::NTH_EQV") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="mapdl::rst::NTH_EQV", server=server) @property def inputs(self): @@ -92,219 +168,218 @@ def inputs(self): Returns -------- - inputs : InputsNodalAveragedEquivalentThermalStrains + inputs : InputsNodalAveragedEquivalentThermalStrains """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNodalAveragedEquivalentThermalStrains + outputs : OutputsNodalAveragedEquivalentThermalStrains """ return super().outputs -#internal name: mapdl::rst::NTH_EQV -#scripting name: nodal_averaged_equivalent_thermal_strains class InputsNodalAveragedEquivalentThermalStrains(_Inputs): - """Intermediate class used to connect user inputs to nodal_averaged_equivalent_thermal_strains operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + nodal_averaged_equivalent_thermal_strains operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(nodal_averaged_equivalent_thermal_strains._spec().inputs, op) - self._time_scoping = Input(nodal_averaged_equivalent_thermal_strains._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + nodal_averaged_equivalent_thermal_strains._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(nodal_averaged_equivalent_thermal_strains._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + nodal_averaged_equivalent_thermal_strains._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(nodal_averaged_equivalent_thermal_strains._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + nodal_averaged_equivalent_thermal_strains._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(nodal_averaged_equivalent_thermal_strains._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + nodal_averaged_equivalent_thermal_strains._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(nodal_averaged_equivalent_thermal_strains._spec().input_pin(4), 4, op, -1) + 
self._data_sources = Input( + nodal_averaged_equivalent_thermal_strains._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(nodal_averaged_equivalent_thermal_strains._spec().input_pin(7), 7, op, -1) + self._mesh = Input( + nodal_averaged_equivalent_thermal_strains._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. Parameters ---------- - my_time_scoping : Scoping, list, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, list, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: Streams containing the result file. + Streams containing the result file. Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: data sources containing the result file. + Data sources containing the result file. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsNodalAveragedEquivalentThermalStrains(_Outputs): - """Intermediate class used to get outputs from nodal_averaged_equivalent_thermal_strains operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + nodal_averaged_equivalent_thermal_strains operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(nodal_averaged_equivalent_thermal_strains._spec().outputs, op) - self._fields_container = Output(nodal_averaged_equivalent_thermal_strains._spec().output_pin(0), 0, op) + self._fields_container = Output( + nodal_averaged_equivalent_thermal_strains._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: FieldsContainer filled in - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/nodal_averaged_plastic_strains.py b/ansys/dpf/core/operators/result/nodal_averaged_plastic_strains.py index 740736a11e3..2ec817171a9 100644 --- a/ansys/dpf/core/operators/result/nodal_averaged_plastic_strains.py +++ b/ansys/dpf/core/operators/result/nodal_averaged_plastic_strains.py @@ -1,90 +1,166 @@ """ nodal_averaged_plastic_strains -============================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class nodal_averaged_plastic_strains(Operator): - """Read nodal averaged plastic strains as averaged nodal result from rst file. 
- - available inputs: - - time_scoping (Scoping, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping, list) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - mesh (MeshedRegion) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.nodal_averaged_plastic_strains() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.nodal_averaged_plastic_strains(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,fields_container=my_fields_container,streams_container=my_streams_container,data_sources=my_data_sources,mesh=my_mesh) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, fields_container=None, streams_container=None, data_sources=None, mesh=None, config=None, server=None): - super().__init__(name="mapdl::rst::NPPL", config = config, server = server) + """Read nodal averaged plastic strains as averaged nodal result from rst + file. 
+ + Parameters + ---------- + time_scoping : Scoping, optional + mesh_scoping : ScopingsContainer or Scoping, optional + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer or Stream, optional + Streams containing the result file. + data_sources : DataSources + Data sources containing the result file. + mesh : MeshedRegion, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.nodal_averaged_plastic_strains() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.nodal_averaged_plastic_strains( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... mesh=my_mesh, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + mesh=None, + config=None, + server=None, + ): + super().__init__(name="mapdl::rst::NPPL", config=config, server=server) self._inputs = InputsNodalAveragedPlasticStrains(self) self._outputs = OutputsNodalAveragedPlasticStrains(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Read nodal averaged plastic strains as averaged nodal result from rst file.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","vector"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping","vector"], optional=True, document=""""""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], optional=True, document="""Streams containing the result file."""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""data sources containing the result file."""), - 7 : 
PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""FieldsContainer filled in""")}) + description = """Read nodal averaged plastic strains as averaged nodal result from rst + file.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping", "vector"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container", "stream"], + optional=True, + document="""Streams containing the result file.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Data sources containing the result file.""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Fieldscontainer filled in""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mapdl::rst::NPPL") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="mapdl::rst::NPPL", server=server) @property def inputs(self): @@ -92,219 +168,218 @@ def inputs(self): Returns -------- - inputs : InputsNodalAveragedPlasticStrains + inputs : InputsNodalAveragedPlasticStrains """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNodalAveragedPlasticStrains + outputs : OutputsNodalAveragedPlasticStrains """ return super().outputs -#internal name: mapdl::rst::NPPL -#scripting name: nodal_averaged_plastic_strains class InputsNodalAveragedPlasticStrains(_Inputs): - """Intermediate class used to connect user inputs to nodal_averaged_plastic_strains operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_averaged_plastic_strains() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + nodal_averaged_plastic_strains operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_averaged_plastic_strains() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(nodal_averaged_plastic_strains._spec().inputs, op) - self._time_scoping = Input(nodal_averaged_plastic_strains._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + nodal_averaged_plastic_strains._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(nodal_averaged_plastic_strains._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + nodal_averaged_plastic_strains._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(nodal_averaged_plastic_strains._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + nodal_averaged_plastic_strains._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(nodal_averaged_plastic_strains._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + nodal_averaged_plastic_strains._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(nodal_averaged_plastic_strains._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + nodal_averaged_plastic_strains._spec().input_pin(4), 4, op, -1 + ) 
self._inputs.append(self._data_sources) - self._mesh = Input(nodal_averaged_plastic_strains._spec().input_pin(7), 7, op, -1) + self._mesh = Input( + nodal_averaged_plastic_strains._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. Parameters ---------- - my_time_scoping : Scoping, list, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_plastic_strains() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, list, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_plastic_strains() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_plastic_strains() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: Streams containing the result file. + Streams containing the result file. Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_plastic_strains() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: data sources containing the result file. + Data sources containing the result file. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_plastic_strains() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_plastic_strains() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsNodalAveragedPlasticStrains(_Outputs): - """Intermediate class used to get outputs from nodal_averaged_plastic_strains operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_averaged_plastic_strains() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + nodal_averaged_plastic_strains operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_averaged_plastic_strains() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(nodal_averaged_plastic_strains._spec().outputs, op) - self._fields_container = Output(nodal_averaged_plastic_strains._spec().output_pin(0), 0, op) + self._fields_container = Output( + nodal_averaged_plastic_strains._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: FieldsContainer filled in - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_plastic_strains() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/nodal_averaged_stresses.py b/ansys/dpf/core/operators/result/nodal_averaged_stresses.py index cc173e0a39f..5fe08d09fdc 100644 --- a/ansys/dpf/core/operators/result/nodal_averaged_stresses.py +++ b/ansys/dpf/core/operators/result/nodal_averaged_stresses.py @@ -1,90 +1,166 @@ """ nodal_averaged_stresses -======================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class nodal_averaged_stresses(Operator): """Read nodal averaged stresses as averaged nodal result from rst file. 
- available inputs: - - time_scoping (Scoping, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping, list) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - mesh (MeshedRegion) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.nodal_averaged_stresses() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.nodal_averaged_stresses(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,fields_container=my_fields_container,streams_container=my_streams_container,data_sources=my_data_sources,mesh=my_mesh) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, fields_container=None, streams_container=None, data_sources=None, mesh=None, config=None, server=None): - super().__init__(name="mapdl::rst::NS", config = config, server = server) + Parameters + ---------- + time_scoping : Scoping, optional + mesh_scoping : ScopingsContainer or Scoping, optional + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + 
streams_container : StreamsContainer or Stream, optional + Streams containing the result file. + data_sources : DataSources + Data sources containing the result file. + mesh : MeshedRegion, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.nodal_averaged_stresses() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.nodal_averaged_stresses( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... mesh=my_mesh, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + mesh=None, + config=None, + server=None, + ): + super().__init__(name="mapdl::rst::NS", config=config, server=server) self._inputs = InputsNodalAveragedStresses(self) self._outputs = OutputsNodalAveragedStresses(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Read nodal averaged stresses as averaged nodal result from rst file.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","vector"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping","vector"], optional=True, document=""""""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], optional=True, document="""Streams containing the result file."""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""data sources containing the result file."""), - 7 : PinSpecification(name = 
"mesh", type_names=["abstract_meshed_region"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""FieldsContainer filled in""")}) + description = ( + """Read nodal averaged stresses as averaged nodal result from rst file.""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping", "vector"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container", "stream"], + optional=True, + document="""Streams containing the result file.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Data sources containing the result file.""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Fieldscontainer filled in""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mapdl::rst::NS") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="mapdl::rst::NS", server=server) @property def inputs(self): @@ -92,219 +168,216 @@ def inputs(self): Returns -------- - inputs : InputsNodalAveragedStresses + inputs : InputsNodalAveragedStresses """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNodalAveragedStresses + outputs : OutputsNodalAveragedStresses """ return super().outputs -#internal name: mapdl::rst::NS -#scripting name: nodal_averaged_stresses class InputsNodalAveragedStresses(_Inputs): - """Intermediate class used to connect user inputs to nodal_averaged_stresses operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_averaged_stresses() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + nodal_averaged_stresses operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_averaged_stresses() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(nodal_averaged_stresses._spec().inputs, op) - self._time_scoping = Input(nodal_averaged_stresses._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + nodal_averaged_stresses._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(nodal_averaged_stresses._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + nodal_averaged_stresses._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(nodal_averaged_stresses._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + nodal_averaged_stresses._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(nodal_averaged_stresses._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + nodal_averaged_stresses._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(nodal_averaged_stresses._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + nodal_averaged_stresses._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(nodal_averaged_stresses._spec().input_pin(7), 
7, op, -1) + self._mesh = Input(nodal_averaged_stresses._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. Parameters ---------- - my_time_scoping : Scoping, list, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_stresses() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, list, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_stresses() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_stresses() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: Streams containing the result file. + Streams containing the result file. Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_stresses() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: data sources containing the result file. + Data sources containing the result file. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_stresses() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_stresses() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsNodalAveragedStresses(_Outputs): - """Intermediate class used to get outputs from nodal_averaged_stresses operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_averaged_stresses() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + nodal_averaged_stresses operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_averaged_stresses() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(nodal_averaged_stresses._spec().outputs, op) - self._fields_container = Output(nodal_averaged_stresses._spec().output_pin(0), 0, op) + self._fields_container = Output( + nodal_averaged_stresses._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: FieldsContainer filled in - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_stresses() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/nodal_averaged_thermal_strains.py b/ansys/dpf/core/operators/result/nodal_averaged_thermal_strains.py index 21759ad95e4..54fc310307a 100644 --- a/ansys/dpf/core/operators/result/nodal_averaged_thermal_strains.py +++ b/ansys/dpf/core/operators/result/nodal_averaged_thermal_strains.py @@ -1,90 +1,166 @@ """ nodal_averaged_thermal_strains -============================== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class nodal_averaged_thermal_strains(Operator): - """Read nodal averaged thermal strains as averaged nodal result from rst file. - - available inputs: - - time_scoping (Scoping, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping, list) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - mesh (MeshedRegion) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.nodal_averaged_thermal_strains() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = 
dpf.operators.result.nodal_averaged_thermal_strains(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,fields_container=my_fields_container,streams_container=my_streams_container,data_sources=my_data_sources,mesh=my_mesh) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, fields_container=None, streams_container=None, data_sources=None, mesh=None, config=None, server=None): - super().__init__(name="mapdl::rst::NTH", config = config, server = server) + """Read nodal averaged thermal strains as averaged nodal result from rst + file. + + Parameters + ---------- + time_scoping : Scoping, optional + mesh_scoping : ScopingsContainer or Scoping, optional + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer or Stream, optional + Streams containing the result file. + data_sources : DataSources + Data sources containing the result file. + mesh : MeshedRegion, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.nodal_averaged_thermal_strains() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.nodal_averaged_thermal_strains( + ... 
time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... mesh=my_mesh, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + mesh=None, + config=None, + server=None, + ): + super().__init__(name="mapdl::rst::NTH", config=config, server=server) self._inputs = InputsNodalAveragedThermalStrains(self) self._outputs = OutputsNodalAveragedThermalStrains(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Read nodal averaged thermal strains as averaged nodal result from rst file.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","vector"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping","vector"], optional=True, document=""""""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], 
optional=True, document="""Streams containing the result file."""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""data sources containing the result file."""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""FieldsContainer filled in""")}) + description = """Read nodal averaged thermal strains as averaged nodal result from rst + file.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping", "vector"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container", "stream"], + optional=True, + document="""Streams containing the result file.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Data sources containing the result file.""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Fieldscontainer filled in""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mapdl::rst::NTH") + def default_config(server=None): + """Returns the default config of the operator. 
+ + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="mapdl::rst::NTH", server=server) @property def inputs(self): @@ -92,219 +168,218 @@ def inputs(self): Returns -------- - inputs : InputsNodalAveragedThermalStrains + inputs : InputsNodalAveragedThermalStrains """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNodalAveragedThermalStrains + outputs : OutputsNodalAveragedThermalStrains """ return super().outputs -#internal name: mapdl::rst::NTH -#scripting name: nodal_averaged_thermal_strains class InputsNodalAveragedThermalStrains(_Inputs): - """Intermediate class used to connect user inputs to nodal_averaged_thermal_strains operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_averaged_thermal_strains() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + nodal_averaged_thermal_strains operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_averaged_thermal_strains() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(nodal_averaged_thermal_strains._spec().inputs, op) - self._time_scoping = Input(nodal_averaged_thermal_strains._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + nodal_averaged_thermal_strains._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(nodal_averaged_thermal_strains._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + nodal_averaged_thermal_strains._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(nodal_averaged_thermal_strains._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + nodal_averaged_thermal_strains._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(nodal_averaged_thermal_strains._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + nodal_averaged_thermal_strains._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(nodal_averaged_thermal_strains._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + nodal_averaged_thermal_strains._spec().input_pin(4), 4, op, -1 + ) 
self._inputs.append(self._data_sources) - self._mesh = Input(nodal_averaged_thermal_strains._spec().input_pin(7), 7, op, -1) + self._mesh = Input( + nodal_averaged_thermal_strains._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. Parameters ---------- - my_time_scoping : Scoping, list, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_strains() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, list, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_strains() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_strains() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: Streams containing the result file. + Streams containing the result file. Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_strains() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: data sources containing the result file. + Data sources containing the result file. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_strains() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_strains() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsNodalAveragedThermalStrains(_Outputs): - """Intermediate class used to get outputs from nodal_averaged_thermal_strains operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_averaged_thermal_strains() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + nodal_averaged_thermal_strains operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_averaged_thermal_strains() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(nodal_averaged_thermal_strains._spec().outputs, op) - self._fields_container = Output(nodal_averaged_thermal_strains._spec().output_pin(0), 0, op) + self._fields_container = Output( + nodal_averaged_thermal_strains._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: FieldsContainer filled in - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_strains() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/nodal_averaged_thermal_swelling_strains.py b/ansys/dpf/core/operators/result/nodal_averaged_thermal_swelling_strains.py index 43a4e89ddc6..c8203715bec 100644 --- a/ansys/dpf/core/operators/result/nodal_averaged_thermal_swelling_strains.py +++ b/ansys/dpf/core/operators/result/nodal_averaged_thermal_swelling_strains.py @@ -1,90 +1,166 @@ """ nodal_averaged_thermal_swelling_strains -======================================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class nodal_averaged_thermal_swelling_strains(Operator): - """Read nodal averaged thermal swelling strains as averaged nodal result from rst file. 
- - available inputs: - - time_scoping (Scoping, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping, list) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - mesh (MeshedRegion) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,fields_container=my_fields_container,streams_container=my_streams_container,data_sources=my_data_sources,mesh=my_mesh) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, fields_container=None, streams_container=None, data_sources=None, mesh=None, config=None, server=None): - super().__init__(name="mapdl::rst::NTH_SWL", config = config, server = server) + """Read nodal averaged thermal swelling strains as averaged nodal result + from rst file. 
+ + Parameters + ---------- + time_scoping : Scoping, optional + mesh_scoping : ScopingsContainer or Scoping, optional + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer or Stream, optional + Streams containing the result file. + data_sources : DataSources + Data sources containing the result file. + mesh : MeshedRegion, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... mesh=my_mesh, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + mesh=None, + config=None, + server=None, + ): + super().__init__(name="mapdl::rst::NTH_SWL", config=config, server=server) self._inputs = InputsNodalAveragedThermalSwellingStrains(self) self._outputs = OutputsNodalAveragedThermalSwellingStrains(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Read nodal averaged thermal swelling strains as averaged nodal result from rst file.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","vector"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping","vector"], optional=True, document=""""""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], optional=True, document="""Streams containing the result file."""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""data sources containing the 
result file."""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""FieldsContainer filled in""")}) + description = """Read nodal averaged thermal swelling strains as averaged nodal result + from rst file.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping", "vector"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container", "stream"], + optional=True, + document="""Streams containing the result file.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Data sources containing the result file.""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Fieldscontainer filled in""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mapdl::rst::NTH_SWL") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="mapdl::rst::NTH_SWL", server=server) @property def inputs(self): @@ -92,219 +168,218 @@ def inputs(self): Returns -------- - inputs : InputsNodalAveragedThermalSwellingStrains + inputs : InputsNodalAveragedThermalSwellingStrains """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNodalAveragedThermalSwellingStrains + outputs : OutputsNodalAveragedThermalSwellingStrains """ return super().outputs -#internal name: mapdl::rst::NTH_SWL -#scripting name: nodal_averaged_thermal_swelling_strains class InputsNodalAveragedThermalSwellingStrains(_Inputs): - """Intermediate class used to connect user inputs to nodal_averaged_thermal_swelling_strains operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + nodal_averaged_thermal_swelling_strains operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(nodal_averaged_thermal_swelling_strains._spec().inputs, op) - self._time_scoping = Input(nodal_averaged_thermal_swelling_strains._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + nodal_averaged_thermal_swelling_strains._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(nodal_averaged_thermal_swelling_strains._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + nodal_averaged_thermal_swelling_strains._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(nodal_averaged_thermal_swelling_strains._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + nodal_averaged_thermal_swelling_strains._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(nodal_averaged_thermal_swelling_strains._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + nodal_averaged_thermal_swelling_strains._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(nodal_averaged_thermal_swelling_strains._spec().input_pin(4), 4, op, -1) + self._data_sources = 
Input( + nodal_averaged_thermal_swelling_strains._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._mesh = Input(nodal_averaged_thermal_swelling_strains._spec().input_pin(7), 7, op, -1) + self._mesh = Input( + nodal_averaged_thermal_swelling_strains._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. Parameters ---------- - my_time_scoping : Scoping, list, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, list, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: Streams containing the result file. + Streams containing the result file. Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: data sources containing the result file. + Data sources containing the result file. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsNodalAveragedThermalSwellingStrains(_Outputs): - """Intermediate class used to get outputs from nodal_averaged_thermal_swelling_strains operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + nodal_averaged_thermal_swelling_strains operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(nodal_averaged_thermal_swelling_strains._spec().outputs, op) - self._fields_container = Output(nodal_averaged_thermal_swelling_strains._spec().output_pin(0), 0, op) + self._fields_container = Output( + nodal_averaged_thermal_swelling_strains._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: FieldsContainer filled in - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/nodal_force.py b/ansys/dpf/core/operators/result/nodal_force.py index b32a08ef023..7ad28cf5c24 100644 --- a/ansys/dpf/core/operators/result/nodal_force.py +++ b/ansys/dpf/core/operators/result/nodal_force.py @@ -1,92 +1,238 @@ """ nodal_force -=========== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class nodal_force(Operator): - """Read/compute nodal forces by calling the readers defined by the datasources. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.nodal_force() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.nodal_force(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="F", config = config, server = server) + """Read/compute nodal forces by calling the readers defined by the + datasources. 
+ + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.nodal_force() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> 
op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.nodal_force( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="F", config=config, server=server) self._inputs = InputsNodalForce(self) self._outputs = OutputsNodalForce(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute nodal forces by calling the readers defined by the datasources.""", - 
map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute nodal forces by calling the readers defined by the + 
datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default 
+ is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "F") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="F", server=server) @property def inputs(self): @@ -94,275 +240,275 @@ def inputs(self): Returns -------- - inputs : InputsNodalForce + inputs : InputsNodalForce """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNodalForce + outputs : OutputsNodalForce """ return super().outputs -#internal name: F -#scripting name: nodal_force class InputsNodalForce(_Inputs): - """Intermediate class used to connect user inputs to nodal_force operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_force() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - 
>>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + nodal_force operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_force() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(nodal_force._spec().inputs, op) - self._time_scoping = Input(nodal_force._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(nodal_force._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(nodal_force._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(nodal_force._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(nodal_force._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(nodal_force._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(nodal_force._spec().input_pin(3), 3, op, -1) + self._streams_container = 
Input(nodal_force._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(nodal_force._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(nodal_force._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(nodal_force._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input(nodal_force._spec().input_pin(5), 5, op, -1) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(nodal_force._spec().input_pin(7), 7, op, -1) + self._mesh = Input(nodal_force._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(nodal_force._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(nodal_force._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_force() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. 
The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_force() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_force() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_force() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_force() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_force() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
- - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_force() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_force() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsNodalForce(_Outputs): - """Intermediate class used to get outputs from nodal_force operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_force() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + nodal_force operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_force() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(nodal_force._spec().outputs, op) - self._fields_container = Output(nodal_force._spec().output_pin(0), 0, op) + self._fields_container = Output(nodal_force._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_force() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/nodal_moment.py b/ansys/dpf/core/operators/result/nodal_moment.py index 319a471343a..b2d5836b10d 100644 --- a/ansys/dpf/core/operators/result/nodal_moment.py +++ b/ansys/dpf/core/operators/result/nodal_moment.py @@ -1,92 +1,238 @@ """ nodal_moment -============ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class nodal_moment(Operator): - """Read/compute nodal moment by calling the readers defined by the datasources. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.nodal_moment() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.nodal_moment(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="M", config = config, server = server) + """Read/compute nodal moment by calling the readers defined by the + datasources. 
+ + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.nodal_moment() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> 
op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.nodal_moment( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="M", config=config, server=server) self._inputs = InputsNodalMoment(self) self._outputs = OutputsNodalMoment(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute nodal moment by calling the readers defined by the datasources.""", - 
map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute nodal moment by calling the readers defined by the + 
datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default 
+ is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "M") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="M", server=server) @property def inputs(self): @@ -94,275 +240,277 @@ def inputs(self): Returns -------- - inputs : InputsNodalMoment + inputs : InputsNodalMoment """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNodalMoment + outputs : OutputsNodalMoment """ return super().outputs -#internal name: M -#scripting name: nodal_moment class InputsNodalMoment(_Inputs): - """Intermediate class used to connect user inputs to nodal_moment operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_moment() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = 
bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + nodal_moment operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_moment() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(nodal_moment._spec().inputs, op) - self._time_scoping = Input(nodal_moment._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(nodal_moment._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(nodal_moment._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(nodal_moment._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(nodal_moment._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(nodal_moment._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(nodal_moment._spec().input_pin(3), 3, op, -1) + self._streams_container = 
Input(nodal_moment._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(nodal_moment._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(nodal_moment._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(nodal_moment._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + nodal_moment._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(nodal_moment._spec().input_pin(7), 7, op, -1) + self._mesh = Input(nodal_moment._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(nodal_moment._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(nodal_moment._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_moment() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. 
The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_moment() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_moment() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_moment() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_moment() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_moment() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
- - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_moment() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_moment() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsNodalMoment(_Outputs): - """Intermediate class used to get outputs from nodal_moment operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_moment() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + nodal_moment operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_moment() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(nodal_moment._spec().outputs, op) - self._fields_container = Output(nodal_moment._spec().output_pin(0), 0, op) + self._fields_container = Output(nodal_moment._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_moment() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/nodal_rotation_by_euler_nodes.py b/ansys/dpf/core/operators/result/nodal_rotation_by_euler_nodes.py index 75effdafc1d..34b872d4be5 100644 --- a/ansys/dpf/core/operators/result/nodal_rotation_by_euler_nodes.py +++ b/ansys/dpf/core/operators/result/nodal_rotation_by_euler_nodes.py @@ -1,72 +1,131 @@ """ nodal_rotation_by_euler_nodes -============================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class nodal_rotation_by_euler_nodes(Operator): - """read Euler angles on nodes from the rst file and rotate the fields in the fieldsContainer. 
- - available inputs: - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.nodal_rotation_by_euler_nodes() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.nodal_rotation_by_euler_nodes(fields_container=my_fields_container,streams_container=my_streams_container,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, streams_container=None, data_sources=None, config=None, server=None): - super().__init__(name="mapdl::rst::RotateNodalFCByEulerNodes", config = config, server = server) + """read Euler angles on nodes from the rst file and rotate the fields in + the fieldsContainer. 
+ + Parameters + ---------- + fields_container : FieldsContainer, optional + streams_container : StreamsContainer or Stream or Class + Dataprocessing::Crstfilewrapper, optional + data_sources : DataSources + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.nodal_rotation_by_euler_nodes() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.nodal_rotation_by_euler_nodes( + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + fields_container=None, + streams_container=None, + data_sources=None, + config=None, + server=None, + ): + super().__init__( + name="mapdl::rst::RotateNodalFCByEulerNodes", config=config, server=server + ) self._inputs = InputsNodalRotationByEulerNodes(self) self._outputs = OutputsNodalRotationByEulerNodes(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) @staticmethod def _spec(): - spec = Specification(description="""read Euler angles on nodes from the rst file and rotate the fields in the fieldsContainer.""", - map_input_pin_spec={ - 2 : PinSpecification(name = "fields_container", 
type_names=["fields_container"], optional=True, document=""""""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], optional=True, document=""""""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """read Euler angles on nodes from the rst file and rotate the fields in + the fieldsContainer.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""""", + ), + 3: PinSpecification( + name="streams_container", + type_names=[ + "streams_container", + "stream", + "class dataProcessing::CRstFileWrapper", + ], + optional=True, + document="""""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mapdl::rst::RotateNodalFCByEulerNodes") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config( + name="mapdl::rst::RotateNodalFCByEulerNodes", server=server + ) @property def inputs(self): @@ -74,139 +133,140 @@ def inputs(self): Returns -------- - inputs : InputsNodalRotationByEulerNodes + inputs : InputsNodalRotationByEulerNodes """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNodalRotationByEulerNodes + outputs : OutputsNodalRotationByEulerNodes """ return super().outputs -#internal name: mapdl::rst::RotateNodalFCByEulerNodes -#scripting name: nodal_rotation_by_euler_nodes class InputsNodalRotationByEulerNodes(_Inputs): - """Intermediate class used to connect user inputs to nodal_rotation_by_euler_nodes operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_rotation_by_euler_nodes() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + """Intermediate class used to connect user inputs to + nodal_rotation_by_euler_nodes operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_rotation_by_euler_nodes() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) """ + def __init__(self, op: Operator): super().__init__(nodal_rotation_by_euler_nodes._spec().inputs, op) - self._fields_container = Input(nodal_rotation_by_euler_nodes._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + nodal_rotation_by_euler_nodes._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(nodal_rotation_by_euler_nodes._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + nodal_rotation_by_euler_nodes._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(nodal_rotation_by_euler_nodes._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + nodal_rotation_by_euler_nodes._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_rotation_by_euler_nodes() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream or Class + Dataprocessing::Crstfilewrapper Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_rotation_by_euler_nodes() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_rotation_by_euler_nodes() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources + class OutputsNodalRotationByEulerNodes(_Outputs): - """Intermediate class used to get outputs from nodal_rotation_by_euler_nodes operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.nodal_rotation_by_euler_nodes() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + nodal_rotation_by_euler_nodes operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.nodal_rotation_by_euler_nodes() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(nodal_rotation_by_euler_nodes._spec().outputs, op) - self._fields_container = Output(nodal_rotation_by_euler_nodes._spec().output_pin(0), 0, op) + self._fields_container = Output( + nodal_rotation_by_euler_nodes._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_rotation_by_euler_nodes() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/num_surface_status_changes.py b/ansys/dpf/core/operators/result/num_surface_status_changes.py index e07d54442aa..209a94d7531 100644 --- a/ansys/dpf/core/operators/result/num_surface_status_changes.py +++ b/ansys/dpf/core/operators/result/num_surface_status_changes.py @@ -1,98 +1,275 @@ """ num_surface_status_changes -========================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class num_surface_status_changes(Operator): - """Read/compute element total number of contact status changes during substep by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. - - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.num_surface_status_changes() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.num_surface_status_changes(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> 
result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, requested_location=None, config=None, server=None): - super().__init__(name="ECT_CNOS", config = config, server = server) + """Read/compute element total number of contact status changes during + substep by calling the readers defined by the datasources. + Regarding the requested location and the input mesh scoping, the + result location can be Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> 
from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.num_surface_status_changes() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.num_surface_status_changes( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="ECT_CNOS", config=config, server=server) self._inputs = InputsNumSurfaceStatusChanges(self) self._outputs = OutputsNumSurfaceStatusChanges(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element total number of contact status changes during substep by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element total number of contact status changes during + substep by calling the readers defined by the datasources. + Regarding the requested location and the input mesh + scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ECT_CNOS") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ECT_CNOS", server=server) @property def inputs(self): @@ -100,301 +277,345 @@ def inputs(self): Returns -------- - inputs : InputsNumSurfaceStatusChanges + inputs : InputsNumSurfaceStatusChanges """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNumSurfaceStatusChanges + outputs : OutputsNumSurfaceStatusChanges """ return super().outputs -#internal name: ECT_CNOS -#scripting name: num_surface_status_changes class InputsNumSurfaceStatusChanges(_Inputs): - """Intermediate class used to connect user inputs to num_surface_status_changes operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.num_surface_status_changes() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> 
my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + num_surface_status_changes operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.num_surface_status_changes() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(num_surface_status_changes._spec().inputs, op) - self._time_scoping = Input(num_surface_status_changes._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + num_surface_status_changes._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(num_surface_status_changes._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + num_surface_status_changes._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(num_surface_status_changes._spec().input_pin(2), 2, 
op, -1) + self._fields_container = Input( + num_surface_status_changes._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(num_surface_status_changes._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + num_surface_status_changes._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(num_surface_status_changes._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + num_surface_status_changes._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(num_surface_status_changes._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + num_surface_status_changes._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(num_surface_status_changes._spec().input_pin(7), 7, op, -1) + self._mesh = Input(num_surface_status_changes._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(num_surface_status_changes._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + num_surface_status_changes._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(num_surface_status_changes._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + num_surface_status_changes._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) + self._read_beams = Input( + num_surface_status_changes._spec().input_pin(21), 21, op, -1 + ) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.num_surface_status_changes() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.num_surface_status_changes() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.num_surface_status_changes() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.num_surface_status_changes() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.num_surface_status_changes() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.num_surface_status_changes() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.num_surface_status_changes() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. 
- - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.num_surface_status_changes() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.num_surface_status_changes() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. 
+ + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.num_surface_status_changes() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsNumSurfaceStatusChanges(_Outputs): - """Intermediate class used to get outputs from num_surface_status_changes operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.num_surface_status_changes() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + num_surface_status_changes operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.num_surface_status_changes() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(num_surface_status_changes._spec().outputs, op) - self._fields_container = Output(num_surface_status_changes._spec().output_pin(0), 0, op) + self._fields_container = Output( + num_surface_status_changes._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.num_surface_status_changes() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/plastic_state_variable.py b/ansys/dpf/core/operators/result/plastic_state_variable.py index e4f6c31c862..9687a9c711f 100644 --- a/ansys/dpf/core/operators/result/plastic_state_variable.py +++ b/ansys/dpf/core/operators/result/plastic_state_variable.py @@ -1,98 +1,274 @@ """ plastic_state_variable -====================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class plastic_state_variable(Operator): - """Read/compute element nodal plastic state variable by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.plastic_state_variable() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.plastic_state_variable(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="ENL_PSV", config = config, server = server) + """Read/compute element nodal plastic state variable by calling the + readers defined by the datasources. Regarding the requested + location and the input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.plastic_state_variable() + + >>> # Make input connections + >>> 
my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.plastic_state_variable( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="ENL_PSV", config=config, server=server) self._inputs = InputsPlasticStateVariable(self) self._outputs = OutputsPlasticStateVariable(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal plastic state variable by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal plastic state variable by calling the + readers defined by the datasources. Regarding the + requested location and the input mesh scoping, the result + location can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ENL_PSV") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ENL_PSV", server=server) @property def inputs(self): @@ -100,301 +276,345 @@ def inputs(self): Returns -------- - inputs : InputsPlasticStateVariable + inputs : InputsPlasticStateVariable """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPlasticStateVariable + outputs : OutputsPlasticStateVariable """ return super().outputs -#internal name: ENL_PSV -#scripting name: plastic_state_variable class InputsPlasticStateVariable(_Inputs): - """Intermediate class used to connect user inputs to plastic_state_variable operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.plastic_state_variable() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> 
op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + plastic_state_variable operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_state_variable() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(plastic_state_variable._spec().inputs, op) - self._time_scoping = Input(plastic_state_variable._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + plastic_state_variable._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(plastic_state_variable._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + plastic_state_variable._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(plastic_state_variable._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + 
plastic_state_variable._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(plastic_state_variable._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + plastic_state_variable._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(plastic_state_variable._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + plastic_state_variable._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(plastic_state_variable._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + plastic_state_variable._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_state_variable._spec().input_pin(7), 7, op, -1) + self._mesh = Input(plastic_state_variable._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(plastic_state_variable._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + plastic_state_variable._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(plastic_state_variable._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + plastic_state_variable._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) + self._read_beams = Input( + plastic_state_variable._spec().input_pin(21), 21, op, -1 + ) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_state_variable() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_state_variable() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_state_variable() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_state_variable() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_state_variable() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_state_variable() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_state_variable() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. 
- - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_state_variable() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_state_variable() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. 
+ + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_state_variable() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsPlasticStateVariable(_Outputs): - """Intermediate class used to get outputs from plastic_state_variable operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.plastic_state_variable() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + plastic_state_variable operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_state_variable() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(plastic_state_variable._spec().outputs, op) - self._fields_container = Output(plastic_state_variable._spec().output_pin(0), 0, op) + self._fields_container = Output( + plastic_state_variable._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_state_variable() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/plastic_strain.py b/ansys/dpf/core/operators/result/plastic_strain.py index cc71b7c2e9e..57429ef6fc1 100644 --- a/ansys/dpf/core/operators/result/plastic_strain.py +++ b/ansys/dpf/core/operators/result/plastic_strain.py @@ -1,98 +1,274 @@ """ plastic_strain -============== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class plastic_strain(Operator): - """Read/compute element nodal component plastic strains by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.plastic_strain() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.plastic_strain(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="EPPL", config = config, server = server) + """Read/compute element nodal component plastic strains by calling the + readers defined by the datasources. Regarding the requested + location and the input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.plastic_strain() + + >>> # Make input connections + >>> 
my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.plastic_strain( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="EPPL", config=config, server=server) self._inputs = InputsPlasticStrain(self) self._outputs = OutputsPlasticStrain(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component plastic strains by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component plastic strains by calling the + readers defined by the datasources. Regarding the + requested location and the input mesh scoping, the result + location can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "EPPL") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="EPPL", server=server) @property def inputs(self): @@ -100,301 +276,327 @@ def inputs(self): Returns -------- - inputs : InputsPlasticStrain + inputs : InputsPlasticStrain """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPlasticStrain + outputs : OutputsPlasticStrain """ return super().outputs -#internal name: EPPL -#scripting name: plastic_strain class InputsPlasticStrain(_Inputs): - """Intermediate class used to connect user inputs to plastic_strain operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.plastic_strain() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> 
my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + plastic_strain operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(plastic_strain._spec().inputs, op) - self._time_scoping = Input(plastic_strain._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(plastic_strain._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(plastic_strain._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(plastic_strain._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(plastic_strain._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(plastic_strain._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(plastic_strain._spec().input_pin(3), 3, op, 
-1) + self._streams_container = Input(plastic_strain._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(plastic_strain._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(plastic_strain._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(plastic_strain._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + plastic_strain._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_strain._spec().input_pin(7), 7, op, -1) + self._mesh = Input(plastic_strain._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(plastic_strain._spec().input_pin(9), 9, op, -1) + self._requested_location = Input(plastic_strain._spec().input_pin(9), 9, op, -1) self._inputs.append(self._requested_location) - self._read_cyclic = Input(plastic_strain._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(plastic_strain._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) + self._read_beams = Input(plastic_strain._spec().input_pin(21), 21, op, -1) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. + + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsPlasticStrain(_Outputs): - """Intermediate class used to get outputs from plastic_strain operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.plastic_strain() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + plastic_strain operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(plastic_strain._spec().outputs, op) - self._fields_container = Output(plastic_strain._spec().output_pin(0), 0, op) + self._fields_container = Output(plastic_strain._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/plastic_strain_X.py b/ansys/dpf/core/operators/result/plastic_strain_X.py index 8279340f19b..d08a9e99403 100644 --- a/ansys/dpf/core/operators/result/plastic_strain_X.py +++ b/ansys/dpf/core/operators/result/plastic_strain_X.py @@ -1,98 +1,258 @@ """ plastic_strain_X -================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class plastic_strain_X(Operator): - """Read/compute element nodal component plastic strains XX normal component (00 component) by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.plastic_strain_X() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.plastic_strain_X(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="EPPLX", config = config, server = server) + """Read/compute element nodal component plastic strains XX normal + component (00 component) by calling the readers defined by the + datasources. Regarding the requested location and the input mesh + scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location, default is nodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.plastic_strain_X() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> 
my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.plastic_strain_X( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="EPPLX", config=config, server=server) self._inputs = InputsPlasticStrainX(self) self._outputs = OutputsPlasticStrainX(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component plastic strains XX normal component (00 component) by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location, default is Nodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done 
and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component plastic strains XX normal + component (00 component) by calling the readers defined by + the datasources. Regarding the requested location and the + input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location, default is nodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "EPPLX") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="EPPLX", server=server) @property def inputs(self): @@ -100,301 +260,305 @@ def inputs(self): Returns -------- - inputs : InputsPlasticStrainX + inputs : InputsPlasticStrainX """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPlasticStrainX + outputs : OutputsPlasticStrainX """ return super().outputs -#internal name: EPPLX -#scripting name: plastic_strain_X class InputsPlasticStrainX(_Inputs): - """Intermediate class used to connect user inputs to plastic_strain_X operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.plastic_strain_X() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + plastic_strain_X operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain_X() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(plastic_strain_X._spec().inputs, op) - self._time_scoping = Input(plastic_strain_X._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(plastic_strain_X._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(plastic_strain_X._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(plastic_strain_X._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(plastic_strain_X._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(plastic_strain_X._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(plastic_strain_X._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + plastic_strain_X._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(plastic_strain_X._spec().input_pin(4), 4, op, -1) 
+ self._data_sources = Input(plastic_strain_X._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(plastic_strain_X._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + plastic_strain_X._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_strain_X._spec().input_pin(7), 7, op, -1) + self._mesh = Input(plastic_strain_X._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(plastic_strain_X._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + plastic_strain_X._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(plastic_strain_X._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(plastic_strain_X._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_X() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. 
- - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_X() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_X() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_X() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_X() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_X() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_X() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location, default is Nodal + Requested location, default is nodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_X() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_X() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsPlasticStrainX(_Outputs): - """Intermediate class used to get outputs from plastic_strain_X operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.plastic_strain_X() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + plastic_strain_X operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain_X() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(plastic_strain_X._spec().outputs, op) - self._fields_container = Output(plastic_strain_X._spec().output_pin(0), 0, op) + self._fields_container = Output(plastic_strain_X._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_X() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/plastic_strain_XY.py b/ansys/dpf/core/operators/result/plastic_strain_XY.py index a2ded9ec24b..401da18d207 100644 --- a/ansys/dpf/core/operators/result/plastic_strain_XY.py +++ b/ansys/dpf/core/operators/result/plastic_strain_XY.py @@ -1,98 +1,258 @@ """ plastic_strain_XY -================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class plastic_strain_XY(Operator): - """Read/compute element nodal component plastic strains XY shear component (01 component) by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.plastic_strain_XY() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.plastic_strain_XY(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="EPPLXY", config = config, server = server) - self._inputs = InputsPlasticStrainXY(self) - self._outputs = OutputsPlasticStrainXY(self) - if time_scoping !=None: + """Read/compute element nodal component plastic strains XY shear + component (01 component) by calling the readers defined by the + datasources. Regarding the requested location and the input mesh + scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location, default is nodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.plastic_strain_XY() + + >>> # Make 
input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.plastic_strain_XY( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="EPPLXY", config=config, server=server) + self._inputs = InputsPlasticStrainXy(self) + self._outputs = OutputsPlasticStrainXy(self) + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component plastic strains XY shear component (01 component) by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location, default is Nodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done 
and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component plastic strains XY shear + component (01 component) by calling the readers defined by + the datasources. Regarding the requested location and the + input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location, default is nodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "EPPLXY") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="EPPLXY", server=server) @property def inputs(self): @@ -100,301 +260,307 @@ def inputs(self): Returns -------- - inputs : InputsPlasticStrainXY + inputs : InputsPlasticStrainXy """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPlasticStrainXY + outputs : OutputsPlasticStrainXy """ return super().outputs -#internal name: EPPLXY -#scripting name: plastic_strain_XY -class InputsPlasticStrainXY(_Inputs): - """Intermediate class used to connect user inputs to plastic_strain_XY operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.plastic_strain_XY() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) +class InputsPlasticStrainXy(_Inputs): + """Intermediate class used to connect 
user inputs to + plastic_strain_XY operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain_XY() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(plastic_strain_XY._spec().inputs, op) - self._time_scoping = Input(plastic_strain_XY._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(plastic_strain_XY._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(plastic_strain_XY._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(plastic_strain_XY._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(plastic_strain_XY._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + plastic_strain_XY._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(plastic_strain_XY._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + plastic_strain_XY._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - 
self._data_sources = Input(plastic_strain_XY._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(plastic_strain_XY._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(plastic_strain_XY._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + plastic_strain_XY._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_strain_XY._spec().input_pin(7), 7, op, -1) + self._mesh = Input(plastic_strain_XY._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(plastic_strain_XY._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + plastic_strain_XY._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(plastic_strain_XY._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(plastic_strain_XY._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_XY() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_XY() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_XY() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_XY() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_XY() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_XY() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_XY() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location, default is Nodal + Requested location, default is nodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_XY() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_XY() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic -class OutputsPlasticStrainXY(_Outputs): - """Intermediate class used to get outputs from plastic_strain_XY operator - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_XY() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() +class OutputsPlasticStrainXy(_Outputs): + """Intermediate class used to get outputs from + plastic_strain_XY operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain_XY() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(plastic_strain_XY._spec().outputs, op) - self._fields_container = Output(plastic_strain_XY._spec().output_pin(0), 0, op) + self._fields_container = Output(plastic_strain_XY._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_XY() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/plastic_strain_XZ.py b/ansys/dpf/core/operators/result/plastic_strain_XZ.py index 6dd72c6361d..de80b49b8cf 100644 --- a/ansys/dpf/core/operators/result/plastic_strain_XZ.py +++ b/ansys/dpf/core/operators/result/plastic_strain_XZ.py @@ -1,98 +1,258 @@ """ plastic_strain_XZ -================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class plastic_strain_XZ(Operator): - """Read/compute element nodal component plastic strains XZ shear component (02 component) by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.plastic_strain_XZ() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.plastic_strain_XZ(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="EPPLXZ", config = config, server = server) - self._inputs = InputsPlasticStrainXZ(self) - self._outputs = OutputsPlasticStrainXZ(self) - if time_scoping !=None: + """Read/compute element nodal component plastic strains XZ shear + component (02 component) by calling the readers defined by the + datasources. Regarding the requested location and the input mesh + scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location, default is nodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.plastic_strain_XZ() + + >>> # Make 
input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.plastic_strain_XZ( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="EPPLXZ", config=config, server=server) + self._inputs = InputsPlasticStrainXz(self) + self._outputs = OutputsPlasticStrainXz(self) + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component plastic strains XZ shear component (02 component) by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location, default is Nodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done 
and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component plastic strains XZ shear + component (02 component) by calling the readers defined by + the datasources. Regarding the requested location and the + input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location, default is nodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "EPPLXZ") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="EPPLXZ", server=server) @property def inputs(self): @@ -100,301 +260,307 @@ def inputs(self): Returns -------- - inputs : InputsPlasticStrainXZ + inputs : InputsPlasticStrainXz """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPlasticStrainXZ + outputs : OutputsPlasticStrainXz """ return super().outputs -#internal name: EPPLXZ -#scripting name: plastic_strain_XZ -class InputsPlasticStrainXZ(_Inputs): - """Intermediate class used to connect user inputs to plastic_strain_XZ operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.plastic_strain_XZ() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) +class InputsPlasticStrainXz(_Inputs): + """Intermediate class used to connect 
user inputs to + plastic_strain_XZ operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain_XZ() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(plastic_strain_XZ._spec().inputs, op) - self._time_scoping = Input(plastic_strain_XZ._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(plastic_strain_XZ._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(plastic_strain_XZ._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(plastic_strain_XZ._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(plastic_strain_XZ._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + plastic_strain_XZ._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(plastic_strain_XZ._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + plastic_strain_XZ._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - 
self._data_sources = Input(plastic_strain_XZ._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(plastic_strain_XZ._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(plastic_strain_XZ._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + plastic_strain_XZ._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_strain_XZ._spec().input_pin(7), 7, op, -1) + self._mesh = Input(plastic_strain_XZ._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(plastic_strain_XZ._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + plastic_strain_XZ._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(plastic_strain_XZ._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(plastic_strain_XZ._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_XZ() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_XZ() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_XZ() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_XZ() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_XZ() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_XZ() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_XZ() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location, default is Nodal + Requested location, default is nodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_XZ() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_XZ() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic -class OutputsPlasticStrainXZ(_Outputs): - """Intermediate class used to get outputs from plastic_strain_XZ operator - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_XZ() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() +class OutputsPlasticStrainXz(_Outputs): + """Intermediate class used to get outputs from + plastic_strain_XZ operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain_XZ() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(plastic_strain_XZ._spec().outputs, op) - self._fields_container = Output(plastic_strain_XZ._spec().output_pin(0), 0, op) + self._fields_container = Output(plastic_strain_XZ._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_XZ() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/plastic_strain_Y.py b/ansys/dpf/core/operators/result/plastic_strain_Y.py index 6c706aadc22..524ec509424 100644 --- a/ansys/dpf/core/operators/result/plastic_strain_Y.py +++ b/ansys/dpf/core/operators/result/plastic_strain_Y.py @@ -1,98 +1,258 @@ """ plastic_strain_Y -================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class plastic_strain_Y(Operator): - """Read/compute element nodal component plastic strains YY normal component (11 component) by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.plastic_strain_Y() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.plastic_strain_Y(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="EPPLY", config = config, server = server) + """Read/compute element nodal component plastic strains YY normal + component (11 component) by calling the readers defined by the + datasources. Regarding the requested location and the input mesh + scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location, default is nodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.plastic_strain_Y() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> 
my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.plastic_strain_Y( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="EPPLY", config=config, server=server) self._inputs = InputsPlasticStrainY(self) self._outputs = OutputsPlasticStrainY(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component plastic strains YY normal component (11 component) by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location, default is Nodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done 
and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component plastic strains YY normal + component (11 component) by calling the readers defined by + the datasources. Regarding the requested location and the + input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location, default is nodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "EPPLY") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="EPPLY", server=server) @property def inputs(self): @@ -100,301 +260,305 @@ def inputs(self): Returns -------- - inputs : InputsPlasticStrainY + inputs : InputsPlasticStrainY """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPlasticStrainY + outputs : OutputsPlasticStrainY """ return super().outputs -#internal name: EPPLY -#scripting name: plastic_strain_Y class InputsPlasticStrainY(_Inputs): - """Intermediate class used to connect user inputs to plastic_strain_Y operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.plastic_strain_Y() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + plastic_strain_Y operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain_Y() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(plastic_strain_Y._spec().inputs, op) - self._time_scoping = Input(plastic_strain_Y._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(plastic_strain_Y._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(plastic_strain_Y._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(plastic_strain_Y._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(plastic_strain_Y._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(plastic_strain_Y._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(plastic_strain_Y._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + plastic_strain_Y._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(plastic_strain_Y._spec().input_pin(4), 4, op, -1) 
+ self._data_sources = Input(plastic_strain_Y._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(plastic_strain_Y._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + plastic_strain_Y._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_strain_Y._spec().input_pin(7), 7, op, -1) + self._mesh = Input(plastic_strain_Y._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(plastic_strain_Y._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + plastic_strain_Y._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(plastic_strain_Y._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(plastic_strain_Y._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_Y() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. 
- - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_Y() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_Y() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_Y() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_Y() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_Y() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_Y() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location, default is Nodal + Requested location, default is nodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_Y() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_Y() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsPlasticStrainY(_Outputs): - """Intermediate class used to get outputs from plastic_strain_Y operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.plastic_strain_Y() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + plastic_strain_Y operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain_Y() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(plastic_strain_Y._spec().outputs, op) - self._fields_container = Output(plastic_strain_Y._spec().output_pin(0), 0, op) + self._fields_container = Output(plastic_strain_Y._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_Y() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/plastic_strain_YZ.py b/ansys/dpf/core/operators/result/plastic_strain_YZ.py index b6413b60949..9e24b5f9dd8 100644 --- a/ansys/dpf/core/operators/result/plastic_strain_YZ.py +++ b/ansys/dpf/core/operators/result/plastic_strain_YZ.py @@ -1,98 +1,258 @@ """ plastic_strain_YZ -================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class plastic_strain_YZ(Operator): - """Read/compute element nodal component plastic strains YZ shear component (12 component) by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.plastic_strain_YZ() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.plastic_strain_YZ(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="EPPLYZ", config = config, server = server) - self._inputs = InputsPlasticStrainYZ(self) - self._outputs = OutputsPlasticStrainYZ(self) - if time_scoping !=None: + """Read/compute element nodal component plastic strains YZ shear + component (12 component) by calling the readers defined by the + datasources. Regarding the requested location and the input mesh + scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location, default is nodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.plastic_strain_YZ() + + >>> # Make 
input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.plastic_strain_YZ( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="EPPLYZ", config=config, server=server) + self._inputs = InputsPlasticStrainYz(self) + self._outputs = OutputsPlasticStrainYz(self) + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component plastic strains YZ shear component (12 component) by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location, default is Nodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done 
and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component plastic strains YZ shear + component (12 component) by calling the readers defined by + the datasources. Regarding the requested location and the + input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location, default is nodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "EPPLYZ") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="EPPLYZ", server=server) @property def inputs(self): @@ -100,301 +260,307 @@ def inputs(self): Returns -------- - inputs : InputsPlasticStrainYZ + inputs : InputsPlasticStrainYz """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPlasticStrainYZ + outputs : OutputsPlasticStrainYz """ return super().outputs -#internal name: EPPLYZ -#scripting name: plastic_strain_YZ -class InputsPlasticStrainYZ(_Inputs): - """Intermediate class used to connect user inputs to plastic_strain_YZ operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.plastic_strain_YZ() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) +class InputsPlasticStrainYz(_Inputs): + """Intermediate class used to connect 
user inputs to + plastic_strain_YZ operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain_YZ() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(plastic_strain_YZ._spec().inputs, op) - self._time_scoping = Input(plastic_strain_YZ._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(plastic_strain_YZ._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(plastic_strain_YZ._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(plastic_strain_YZ._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(plastic_strain_YZ._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + plastic_strain_YZ._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(plastic_strain_YZ._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + plastic_strain_YZ._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - 
self._data_sources = Input(plastic_strain_YZ._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(plastic_strain_YZ._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(plastic_strain_YZ._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + plastic_strain_YZ._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_strain_YZ._spec().input_pin(7), 7, op, -1) + self._mesh = Input(plastic_strain_YZ._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(plastic_strain_YZ._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + plastic_strain_YZ._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(plastic_strain_YZ._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(plastic_strain_YZ._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_YZ() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_YZ() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_YZ() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_YZ() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_YZ() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_YZ() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_YZ() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location, default is Nodal + Requested location, default is nodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_YZ() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_YZ() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic -class OutputsPlasticStrainYZ(_Outputs): - """Intermediate class used to get outputs from plastic_strain_YZ operator - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_YZ() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() +class OutputsPlasticStrainYz(_Outputs): + """Intermediate class used to get outputs from + plastic_strain_YZ operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain_YZ() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(plastic_strain_YZ._spec().outputs, op) - self._fields_container = Output(plastic_strain_YZ._spec().output_pin(0), 0, op) + self._fields_container = Output(plastic_strain_YZ._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_YZ() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/plastic_strain_Z.py b/ansys/dpf/core/operators/result/plastic_strain_Z.py index 9a4a72b12b7..608e31838e5 100644 --- a/ansys/dpf/core/operators/result/plastic_strain_Z.py +++ b/ansys/dpf/core/operators/result/plastic_strain_Z.py @@ -1,98 +1,258 @@ """ plastic_strain_Z -================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class plastic_strain_Z(Operator): - """Read/compute element nodal component plastic strains ZZ normal component (22 component) by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.plastic_strain_Z() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.plastic_strain_Z(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="EPPLZ", config = config, server = server) + """Read/compute element nodal component plastic strains ZZ normal + component (22 component) by calling the readers defined by the + datasources. Regarding the requested location and the input mesh + scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location, default is nodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.plastic_strain_Z() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> 
my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.plastic_strain_Z( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="EPPLZ", config=config, server=server) self._inputs = InputsPlasticStrainZ(self) self._outputs = OutputsPlasticStrainZ(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component plastic strains ZZ normal component (22 component) by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location, default is Nodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done 
and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component plastic strains ZZ normal + component (22 component) by calling the readers defined by + the datasources. Regarding the requested location and the + input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location, default is nodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "EPPLZ") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="EPPLZ", server=server) @property def inputs(self): @@ -100,301 +260,305 @@ def inputs(self): Returns -------- - inputs : InputsPlasticStrainZ + inputs : InputsPlasticStrainZ """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPlasticStrainZ + outputs : OutputsPlasticStrainZ """ return super().outputs -#internal name: EPPLZ -#scripting name: plastic_strain_Z class InputsPlasticStrainZ(_Inputs): - """Intermediate class used to connect user inputs to plastic_strain_Z operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.plastic_strain_Z() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + plastic_strain_Z operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain_Z() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(plastic_strain_Z._spec().inputs, op) - self._time_scoping = Input(plastic_strain_Z._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(plastic_strain_Z._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(plastic_strain_Z._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(plastic_strain_Z._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(plastic_strain_Z._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(plastic_strain_Z._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(plastic_strain_Z._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + plastic_strain_Z._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(plastic_strain_Z._spec().input_pin(4), 4, op, -1) 
+ self._data_sources = Input(plastic_strain_Z._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(plastic_strain_Z._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + plastic_strain_Z._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_strain_Z._spec().input_pin(7), 7, op, -1) + self._mesh = Input(plastic_strain_Z._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(plastic_strain_Z._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + plastic_strain_Z._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(plastic_strain_Z._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(plastic_strain_Z._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_Z() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. 
- - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_Z() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_Z() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_Z() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_Z() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_Z() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_Z() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location, default is Nodal + Requested location, default is nodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_Z() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_Z() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsPlasticStrainZ(_Outputs): - """Intermediate class used to get outputs from plastic_strain_Z operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.plastic_strain_Z() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + plastic_strain_Z operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain_Z() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(plastic_strain_Z._spec().outputs, op) - self._fields_container = Output(plastic_strain_Z._spec().output_pin(0), 0, op) + self._fields_container = Output(plastic_strain_Z._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_Z() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/plastic_strain_energy_density.py b/ansys/dpf/core/operators/result/plastic_strain_energy_density.py index fb425c595d2..d2d8ba1d32c 100644 --- a/ansys/dpf/core/operators/result/plastic_strain_energy_density.py +++ b/ansys/dpf/core/operators/result/plastic_strain_energy_density.py @@ -1,98 +1,274 @@ """ plastic_strain_energy_density -============================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class plastic_strain_energy_density(Operator): - """Read/compute element nodal plastic strain energy density by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.plastic_strain_energy_density() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.plastic_strain_energy_density(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, 
data_sources=None, requested_location=None, config=None, server=None): - super().__init__(name="ENL_PLWK", config = config, server = server) + """Read/compute element nodal plastic strain energy density by calling + the readers defined by the datasources. Regarding the requested + location and the input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.plastic_strain_energy_density() + + >>> 
# Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.plastic_strain_energy_density( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="ENL_PLWK", config=config, server=server) self._inputs = InputsPlasticStrainEnergyDensity(self) self._outputs = OutputsPlasticStrainEnergyDensity(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal plastic strain energy density by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal plastic strain energy density by calling + the readers defined by the datasources. Regarding the + requested location and the input mesh scoping, the result + location can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ENL_PLWK") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ENL_PLWK", server=server) @property def inputs(self): @@ -100,301 +276,347 @@ def inputs(self): Returns -------- - inputs : InputsPlasticStrainEnergyDensity + inputs : InputsPlasticStrainEnergyDensity """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPlasticStrainEnergyDensity + outputs : OutputsPlasticStrainEnergyDensity """ return super().outputs -#internal name: ENL_PLWK -#scripting name: plastic_strain_energy_density class InputsPlasticStrainEnergyDensity(_Inputs): - """Intermediate class used to connect user inputs to plastic_strain_energy_density operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.plastic_strain_energy_density() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> 
op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + plastic_strain_energy_density operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain_energy_density() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(plastic_strain_energy_density._spec().inputs, op) - self._time_scoping = Input(plastic_strain_energy_density._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + plastic_strain_energy_density._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(plastic_strain_energy_density._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + plastic_strain_energy_density._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = 
Input(plastic_strain_energy_density._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + plastic_strain_energy_density._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(plastic_strain_energy_density._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + plastic_strain_energy_density._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(plastic_strain_energy_density._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + plastic_strain_energy_density._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(plastic_strain_energy_density._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + plastic_strain_energy_density._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_strain_energy_density._spec().input_pin(7), 7, op, -1) + self._mesh = Input( + plastic_strain_energy_density._spec().input_pin(7), 7, op, -1 + ) self._inputs.append(self._mesh) - self._requested_location = Input(plastic_strain_energy_density._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + plastic_strain_energy_density._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(plastic_strain_energy_density._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + plastic_strain_energy_density._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) + self._read_beams = Input( + plastic_strain_energy_density._spec().input_pin(21), 21, op, -1 + ) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_energy_density() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_energy_density() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_energy_density() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_energy_density() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_energy_density() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_energy_density() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_energy_density() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. 
- - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_energy_density() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_energy_density() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. 
+ + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain_energy_density() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsPlasticStrainEnergyDensity(_Outputs): - """Intermediate class used to get outputs from plastic_strain_energy_density operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.plastic_strain_energy_density() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + plastic_strain_energy_density operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain_energy_density() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(plastic_strain_energy_density._spec().outputs, op) - self._fields_container = Output(plastic_strain_energy_density._spec().output_pin(0), 0, op) + self._fields_container = Output( + plastic_strain_energy_density._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_energy_density() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/plastic_strain_principal_1.py b/ansys/dpf/core/operators/result/plastic_strain_principal_1.py index b869834e42e..43097058139 100644 --- a/ansys/dpf/core/operators/result/plastic_strain_principal_1.py +++ b/ansys/dpf/core/operators/result/plastic_strain_principal_1.py @@ -1,96 +1,253 @@ """ plastic_strain_principal_1 -========================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class plastic_strain_principal_1(Operator): - """Read/compute element nodal component plastic strains 1st principal component by calling the readers defined by the datasources and computing its eigen values. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.plastic_strain_principal_1() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.plastic_strain_principal_1(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - 
super().__init__(name="EPPL1", config = config, server = server) + """Read/compute element nodal component plastic strains 1st principal + component by calling the readers defined by the datasources and + computing its eigen values. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.plastic_strain_principal_1() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> 
op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.plastic_strain_principal_1( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="EPPL1", config=config, server=server) self._inputs = InputsPlasticStrainPrincipal1(self) self._outputs = OutputsPlasticStrainPrincipal1(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component plastic strains 1st principal component by calling the readers defined by the datasources and computing its eigen values.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", 
type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document=""""""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component plastic strains 1st principal + component by calling the readers defined by the + datasources and computing its eigen values.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + 
"vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: 
PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "EPPL1") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="EPPL1", server=server) @property def inputs(self): @@ -98,299 +255,315 @@ def inputs(self): Returns -------- - inputs : InputsPlasticStrainPrincipal1 + inputs : InputsPlasticStrainPrincipal1 """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPlasticStrainPrincipal1 + outputs : OutputsPlasticStrainPrincipal1 """ return super().outputs -#internal name: EPPL1 -#scripting name: plastic_strain_principal_1 class InputsPlasticStrainPrincipal1(_Inputs): - """Intermediate class used to connect user inputs to plastic_strain_principal_1 operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.plastic_strain_principal_1() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> 
op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + plastic_strain_principal_1 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain_principal_1() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(plastic_strain_principal_1._spec().inputs, op) - self._time_scoping = Input(plastic_strain_principal_1._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + plastic_strain_principal_1._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(plastic_strain_principal_1._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + 
plastic_strain_principal_1._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(plastic_strain_principal_1._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + plastic_strain_principal_1._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(plastic_strain_principal_1._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + plastic_strain_principal_1._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(plastic_strain_principal_1._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + plastic_strain_principal_1._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(plastic_strain_principal_1._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + plastic_strain_principal_1._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_strain_principal_1._spec().input_pin(7), 7, op, -1) + self._mesh = Input(plastic_strain_principal_1._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(plastic_strain_principal_1._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + plastic_strain_principal_1._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(plastic_strain_principal_1._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + plastic_strain_principal_1._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_1() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_1() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_1() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_1() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_1() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_1() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_1() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. 
Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_1() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_1() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsPlasticStrainPrincipal1(_Outputs): - """Intermediate class used to get outputs from plastic_strain_principal_1 operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.plastic_strain_principal_1() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + plastic_strain_principal_1 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain_principal_1() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(plastic_strain_principal_1._spec().outputs, op) - self._fields_container = Output(plastic_strain_principal_1._spec().output_pin(0), 0, op) + self._fields_container = Output( + plastic_strain_principal_1._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_1() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/plastic_strain_principal_2.py b/ansys/dpf/core/operators/result/plastic_strain_principal_2.py index b3a1d085622..e03d26c9fce 100644 --- a/ansys/dpf/core/operators/result/plastic_strain_principal_2.py +++ b/ansys/dpf/core/operators/result/plastic_strain_principal_2.py @@ -1,96 +1,253 @@ """ plastic_strain_principal_2 -========================== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class plastic_strain_principal_2(Operator): - """Read/compute element nodal component plastic strains 2nd principal component by calling the readers defined by the datasources and computing its eigen values. - - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.plastic_strain_principal_2() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - 
>>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.plastic_strain_principal_2(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="EPPL2", config = config, server = server) + """Read/compute element nodal component plastic strains 2nd principal + component by calling the readers defined by the datasources and + computing its eigen values. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.plastic_strain_principal_2() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = 
dpf.operators.result.plastic_strain_principal_2( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="EPPL2", config=config, server=server) self._inputs = InputsPlasticStrainPrincipal2(self) self._outputs = OutputsPlasticStrainPrincipal2(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component plastic strains 2nd principal component by calling the readers defined by the datasources and computing its eigen 
values.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document=""""""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], 
optional=False, document="""""")}) + description = """Read/compute element nodal component plastic strains 2nd principal + component by calling the readers defined by the + datasources and computing its eigen values.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + 
document="""""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "EPPL2") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="EPPL2", server=server) @property def inputs(self): @@ -98,299 +255,315 @@ def inputs(self): Returns -------- - inputs : InputsPlasticStrainPrincipal2 + inputs : InputsPlasticStrainPrincipal2 """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPlasticStrainPrincipal2 + outputs : OutputsPlasticStrainPrincipal2 """ return super().outputs -#internal name: EPPL2 -#scripting name: plastic_strain_principal_2 class InputsPlasticStrainPrincipal2(_Inputs): - """Intermediate class used to connect user inputs to plastic_strain_principal_2 operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.plastic_strain_principal_2() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + plastic_strain_principal_2 operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain_principal_2() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(plastic_strain_principal_2._spec().inputs, op) - self._time_scoping = Input(plastic_strain_principal_2._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + plastic_strain_principal_2._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(plastic_strain_principal_2._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + plastic_strain_principal_2._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(plastic_strain_principal_2._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + plastic_strain_principal_2._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(plastic_strain_principal_2._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + plastic_strain_principal_2._spec().input_pin(3), 3, op, -1 + ) 
self._inputs.append(self._streams_container) - self._data_sources = Input(plastic_strain_principal_2._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + plastic_strain_principal_2._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(plastic_strain_principal_2._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + plastic_strain_principal_2._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_strain_principal_2._spec().input_pin(7), 7, op, -1) + self._mesh = Input(plastic_strain_principal_2._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(plastic_strain_principal_2._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + plastic_strain_principal_2._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(plastic_strain_principal_2._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + plastic_strain_principal_2._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_2() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_2() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_2() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_2() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_2() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_2() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_2() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. 
Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_2() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_2() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsPlasticStrainPrincipal2(_Outputs): - """Intermediate class used to get outputs from plastic_strain_principal_2 operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.plastic_strain_principal_2() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + plastic_strain_principal_2 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain_principal_2() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(plastic_strain_principal_2._spec().outputs, op) - self._fields_container = Output(plastic_strain_principal_2._spec().output_pin(0), 0, op) + self._fields_container = Output( + plastic_strain_principal_2._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_2() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/plastic_strain_principal_3.py b/ansys/dpf/core/operators/result/plastic_strain_principal_3.py index c25aebdb254..ede8fe0bc65 100644 --- a/ansys/dpf/core/operators/result/plastic_strain_principal_3.py +++ b/ansys/dpf/core/operators/result/plastic_strain_principal_3.py @@ -1,96 +1,253 @@ """ plastic_strain_principal_3 -========================== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class plastic_strain_principal_3(Operator): - """Read/compute element nodal component plastic strains 3rd principal component by calling the readers defined by the datasources and computing its eigen values. - - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.plastic_strain_principal_3() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - 
>>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.plastic_strain_principal_3(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="EPPL3", config = config, server = server) + """Read/compute element nodal component plastic strains 3rd principal + component by calling the readers defined by the datasources and + computing its eigen values. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.plastic_strain_principal_3() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = 
dpf.operators.result.plastic_strain_principal_3( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="EPPL3", config=config, server=server) self._inputs = InputsPlasticStrainPrincipal3(self) self._outputs = OutputsPlasticStrainPrincipal3(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component plastic strains 3rd principal component by calling the readers defined by the datasources and computing its eigen 
values.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document=""""""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], 
optional=False, document="""""")}) + description = """Read/compute element nodal component plastic strains 3rd principal + component by calling the readers defined by the + datasources and computing its eigen values.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + 
document="""""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "EPPL3") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="EPPL3", server=server) @property def inputs(self): @@ -98,299 +255,315 @@ def inputs(self): Returns -------- - inputs : InputsPlasticStrainPrincipal3 + inputs : InputsPlasticStrainPrincipal3 """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPlasticStrainPrincipal3 + outputs : OutputsPlasticStrainPrincipal3 """ return super().outputs -#internal name: EPPL3 -#scripting name: plastic_strain_principal_3 class InputsPlasticStrainPrincipal3(_Inputs): - """Intermediate class used to connect user inputs to plastic_strain_principal_3 operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.plastic_strain_principal_3() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + plastic_strain_principal_3 operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain_principal_3() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(plastic_strain_principal_3._spec().inputs, op) - self._time_scoping = Input(plastic_strain_principal_3._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + plastic_strain_principal_3._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(plastic_strain_principal_3._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + plastic_strain_principal_3._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(plastic_strain_principal_3._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + plastic_strain_principal_3._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(plastic_strain_principal_3._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + plastic_strain_principal_3._spec().input_pin(3), 3, op, -1 + ) 
self._inputs.append(self._streams_container) - self._data_sources = Input(plastic_strain_principal_3._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + plastic_strain_principal_3._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(plastic_strain_principal_3._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + plastic_strain_principal_3._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(plastic_strain_principal_3._spec().input_pin(7), 7, op, -1) + self._mesh = Input(plastic_strain_principal_3._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(plastic_strain_principal_3._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + plastic_strain_principal_3._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(plastic_strain_principal_3._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + plastic_strain_principal_3._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_3() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_3() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_3() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_3() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_3() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_3() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_3() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. 
Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_3() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_3() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsPlasticStrainPrincipal3(_Outputs): - """Intermediate class used to get outputs from plastic_strain_principal_3 operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.plastic_strain_principal_3() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + plastic_strain_principal_3 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain_principal_3() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(plastic_strain_principal_3._spec().outputs, op) - self._fields_container = Output(plastic_strain_principal_3._spec().output_pin(0), 0, op) + self._fields_container = Output( + plastic_strain_principal_3._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_principal_3() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/plastic_strain_rotation_by_euler_nodes.py b/ansys/dpf/core/operators/result/plastic_strain_rotation_by_euler_nodes.py index fef25e59545..b45bbb2147d 100644 --- a/ansys/dpf/core/operators/result/plastic_strain_rotation_by_euler_nodes.py +++ b/ansys/dpf/core/operators/result/plastic_strain_rotation_by_euler_nodes.py @@ -1,72 +1,133 @@ """ plastic_strain_rotation_by_euler_nodes -====================================== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class plastic_strain_rotation_by_euler_nodes(Operator): - """read Euler angles on elements from the rst file and rotate the fields in the fieldsContainer. - - available inputs: - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.plastic_strain_rotation_by_euler_nodes() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.plastic_strain_rotation_by_euler_nodes(fields_container=my_fields_container,streams_container=my_streams_container,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, streams_container=None, data_sources=None, config=None, server=None): - super().__init__(name="mapdl::rst::EPPL_rotation_by_euler_nodes", config = config, server = server) + """read Euler angles on elements from the rst file and rotate the fields + in the fieldsContainer. 
+ + Parameters + ---------- + fields_container : FieldsContainer, optional + streams_container : StreamsContainer or Stream or Class + Dataprocessing::Crstfilewrapper, optional + data_sources : DataSources + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.plastic_strain_rotation_by_euler_nodes() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.plastic_strain_rotation_by_euler_nodes( + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + fields_container=None, + streams_container=None, + data_sources=None, + config=None, + server=None, + ): + super().__init__( + name="mapdl::rst::EPPL_rotation_by_euler_nodes", + config=config, + server=server, + ) self._inputs = InputsPlasticStrainRotationByEulerNodes(self) self._outputs = OutputsPlasticStrainRotationByEulerNodes(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) @staticmethod def _spec(): - spec = Specification(description="""read Euler angles on elements from the rst file and rotate the fields in the fieldsContainer.""", - map_input_pin_spec={ - 2 
: PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document=""""""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], optional=True, document=""""""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """read Euler angles on elements from the rst file and rotate the fields + in the fieldsContainer.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""""", + ), + 3: PinSpecification( + name="streams_container", + type_names=[ + "streams_container", + "stream", + "class dataProcessing::CRstFileWrapper", + ], + optional=True, + document="""""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mapdl::rst::EPPL_rotation_by_euler_nodes") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config( + name="mapdl::rst::EPPL_rotation_by_euler_nodes", server=server + ) @property def inputs(self): @@ -74,139 +135,140 @@ def inputs(self): Returns -------- - inputs : InputsPlasticStrainRotationByEulerNodes + inputs : InputsPlasticStrainRotationByEulerNodes """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPlasticStrainRotationByEulerNodes + outputs : OutputsPlasticStrainRotationByEulerNodes """ return super().outputs -#internal name: mapdl::rst::EPPL_rotation_by_euler_nodes -#scripting name: plastic_strain_rotation_by_euler_nodes class InputsPlasticStrainRotationByEulerNodes(_Inputs): - """Intermediate class used to connect user inputs to plastic_strain_rotation_by_euler_nodes operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.plastic_strain_rotation_by_euler_nodes() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + """Intermediate class used to connect user inputs to + plastic_strain_rotation_by_euler_nodes operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain_rotation_by_euler_nodes() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) """ + def __init__(self, op: Operator): super().__init__(plastic_strain_rotation_by_euler_nodes._spec().inputs, op) - self._fields_container = Input(plastic_strain_rotation_by_euler_nodes._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + plastic_strain_rotation_by_euler_nodes._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(plastic_strain_rotation_by_euler_nodes._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + plastic_strain_rotation_by_euler_nodes._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(plastic_strain_rotation_by_euler_nodes._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + plastic_strain_rotation_by_euler_nodes._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_rotation_by_euler_nodes() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream or Class + Dataprocessing::Crstfilewrapper Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_rotation_by_euler_nodes() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_rotation_by_euler_nodes() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources + class OutputsPlasticStrainRotationByEulerNodes(_Outputs): - """Intermediate class used to get outputs from plastic_strain_rotation_by_euler_nodes operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.plastic_strain_rotation_by_euler_nodes() - >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + plastic_strain_rotation_by_euler_nodes operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.plastic_strain_rotation_by_euler_nodes() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(plastic_strain_rotation_by_euler_nodes._spec().outputs, op) - self._fields_container = Output(plastic_strain_rotation_by_euler_nodes._spec().output_pin(0), 0, op) + self._fields_container = Output( + plastic_strain_rotation_by_euler_nodes._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.plastic_strain_rotation_by_euler_nodes() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/poynting_vector.py b/ansys/dpf/core/operators/result/poynting_vector.py index 0c8b1b8bab3..83dacb96f99 100644 --- a/ansys/dpf/core/operators/result/poynting_vector.py +++ b/ansys/dpf/core/operators/result/poynting_vector.py @@ -1,90 +1,167 @@ """ poynting_vector =============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "result" category -""" class poynting_vector(Operator): """Compute the Poynting Vector - available inputs: - - fields_containerA (FieldsContainer) - - fields_containerB (FieldsContainer) - - fields_containerC (FieldsContainer) - - fields_containerD (FieldsContainer) - - meshed_region (MeshedRegion) (optional) - - int32 (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.poynting_vector() - - >>> # Make input connections - >>> my_fields_containerA = dpf.FieldsContainer() - >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> my_fields_containerB = dpf.FieldsContainer() - >>> op.inputs.fields_containerB.connect(my_fields_containerB) - >>> my_fields_containerC = dpf.FieldsContainer() - >>> op.inputs.fields_containerC.connect(my_fields_containerC) - >>> my_fields_containerD = dpf.FieldsContainer() - >>> op.inputs.fields_containerD.connect(my_fields_containerD) - >>> my_meshed_region = dpf.MeshedRegion() - >>> op.inputs.meshed_region.connect(my_meshed_region) - >>> my_int32 = int() - >>> op.inputs.int32.connect(my_int32) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.poynting_vector(fields_containerA=my_fields_containerA,fields_containerB=my_fields_containerB,fields_containerC=my_fields_containerC,fields_containerD=my_fields_containerD,meshed_region=my_meshed_region,int32=my_int32) - - >>> # Get output data - >>> result_fields_container = 
op.outputs.fields_container()""" - def __init__(self, fields_containerA=None, fields_containerB=None, fields_containerC=None, fields_containerD=None, meshed_region=None, int32=None, config=None, server=None): - super().__init__(name="PoyntingVector", config = config, server = server) + Parameters + ---------- + fields_containerA : FieldsContainer + fields_containerB : FieldsContainer + fields_containerC : FieldsContainer + fields_containerD : FieldsContainer + abstract_meshed_region : MeshedRegion, optional + The mesh region in this pin have to be + boundary or skin mesh + int32 : int, optional + Load step number, if it's specified, the + poynting vector is computed only on + the substeps of this step + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.poynting_vector() + + >>> # Make input connections + >>> my_fields_containerA = dpf.FieldsContainer() + >>> op.inputs.fields_containerA.connect(my_fields_containerA) + >>> my_fields_containerB = dpf.FieldsContainer() + >>> op.inputs.fields_containerB.connect(my_fields_containerB) + >>> my_fields_containerC = dpf.FieldsContainer() + >>> op.inputs.fields_containerC.connect(my_fields_containerC) + >>> my_fields_containerD = dpf.FieldsContainer() + >>> op.inputs.fields_containerD.connect(my_fields_containerD) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_int32 = int() + >>> op.inputs.int32.connect(my_int32) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.poynting_vector( + ... fields_containerA=my_fields_containerA, + ... fields_containerB=my_fields_containerB, + ... fields_containerC=my_fields_containerC, + ... fields_containerD=my_fields_containerD, + ... abstract_meshed_region=my_abstract_meshed_region, + ... int32=my_int32, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + fields_containerA=None, + fields_containerB=None, + fields_containerC=None, + fields_containerD=None, + abstract_meshed_region=None, + int32=None, + config=None, + server=None, + ): + super().__init__(name="PoyntingVector", config=config, server=server) self._inputs = InputsPoyntingVector(self) self._outputs = OutputsPoyntingVector(self) - if fields_containerA !=None: + if fields_containerA is not None: self.inputs.fields_containerA.connect(fields_containerA) - if fields_containerB !=None: + if fields_containerB is not None: self.inputs.fields_containerB.connect(fields_containerB) - if fields_containerC !=None: + if fields_containerC is not None: self.inputs.fields_containerC.connect(fields_containerC) - if fields_containerD !=None: + if fields_containerD is not None: self.inputs.fields_containerD.connect(fields_containerD) - if meshed_region !=None: - self.inputs.meshed_region.connect(meshed_region) - if int32 !=None: + if abstract_meshed_region is not None: + self.inputs.abstract_meshed_region.connect(abstract_meshed_region) + if int32 is not None: self.inputs.int32.connect(int32) @staticmethod def _spec(): - spec = Specification(description="""Compute the Poynting Vector""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_containerA", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "fields_containerB", type_names=["fields_container"], optional=False, document=""""""), - 2 : PinSpecification(name = "fields_containerC", type_names=["fields_container"], optional=False, document=""""""), - 3 : PinSpecification(name = "fields_containerD", type_names=["fields_container"], optional=False, document=""""""), - 4 : PinSpecification(name = "meshed_region", type_names=["abstract_meshed_region"], optional=True, document="""the mesh region in this pin have to be boundary or skin mesh"""), - 5 
: PinSpecification(name = "int32", type_names=["int32"], optional=True, document="""load step number, if it's specified, the Poynting Vector is computed only on the substeps of this step""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Compute the Poynting Vector""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_containerA", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="fields_containerB", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="fields_containerC", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 3: PinSpecification( + name="fields_containerD", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 4: PinSpecification( + name="abstract_meshed_region", + type_names=["abstract_meshed_region"], + optional=True, + document="""The mesh region in this pin have to be + boundary or skin mesh""", + ), + 5: PinSpecification( + name="int32", + type_names=["int32"], + optional=True, + document="""Load step number, if it's specified, the + poynting vector is computed only on + the substeps of this step""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "PoyntingVector") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="PoyntingVector", server=server) @property def inputs(self): @@ -92,215 +169,206 @@ def inputs(self): Returns -------- - inputs : InputsPoyntingVector + inputs : InputsPoyntingVector """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPoyntingVector + outputs : OutputsPoyntingVector """ return super().outputs -#internal name: PoyntingVector -#scripting name: poynting_vector class InputsPoyntingVector(_Inputs): - """Intermediate class used to connect user inputs to poynting_vector operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.poynting_vector() - >>> my_fields_containerA = dpf.FieldsContainer() - >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> my_fields_containerB = dpf.FieldsContainer() - >>> op.inputs.fields_containerB.connect(my_fields_containerB) - >>> my_fields_containerC = dpf.FieldsContainer() - >>> op.inputs.fields_containerC.connect(my_fields_containerC) - >>> my_fields_containerD = dpf.FieldsContainer() - >>> op.inputs.fields_containerD.connect(my_fields_containerD) - >>> my_meshed_region = dpf.MeshedRegion() - >>> op.inputs.meshed_region.connect(my_meshed_region) - >>> my_int32 = int() - >>> op.inputs.int32.connect(my_int32) + """Intermediate class used to connect user inputs to + poynting_vector operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.poynting_vector() + >>> my_fields_containerA = dpf.FieldsContainer() + >>> op.inputs.fields_containerA.connect(my_fields_containerA) + >>> my_fields_containerB = dpf.FieldsContainer() + >>> op.inputs.fields_containerB.connect(my_fields_containerB) + >>> my_fields_containerC = dpf.FieldsContainer() + >>> op.inputs.fields_containerC.connect(my_fields_containerC) + >>> my_fields_containerD = dpf.FieldsContainer() + >>> op.inputs.fields_containerD.connect(my_fields_containerD) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_int32 = int() + >>> op.inputs.int32.connect(my_int32) """ + def __init__(self, op: Operator): super().__init__(poynting_vector._spec().inputs, op) - self._fields_containerA = Input(poynting_vector._spec().input_pin(0), 0, op, -1) + self._fields_containerA = Input(poynting_vector._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_containerA) - self._fields_containerB = Input(poynting_vector._spec().input_pin(1), 1, op, -1) + self._fields_containerB = Input(poynting_vector._spec().input_pin(1), 1, op, -1) self._inputs.append(self._fields_containerB) - self._fields_containerC = Input(poynting_vector._spec().input_pin(2), 2, op, -1) + self._fields_containerC = Input(poynting_vector._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_containerC) - self._fields_containerD = Input(poynting_vector._spec().input_pin(3), 3, op, -1) + self._fields_containerD = Input(poynting_vector._spec().input_pin(3), 3, op, -1) self._inputs.append(self._fields_containerD) - self._meshed_region = Input(poynting_vector._spec().input_pin(4), 4, op, -1) - self._inputs.append(self._meshed_region) - self._int32 = Input(poynting_vector._spec().input_pin(5), 5, op, -1) + self._abstract_meshed_region = Input( + poynting_vector._spec().input_pin(4), 4, op, -1 + ) + 
self._inputs.append(self._abstract_meshed_region) + self._int32 = Input(poynting_vector._spec().input_pin(5), 5, op, -1) self._inputs.append(self._int32) @property def fields_containerA(self): - """Allows to connect fields_containerA input to the operator + """Allows to connect fields_containerA input to the operator. Parameters ---------- - my_fields_containerA : FieldsContainer, + my_fields_containerA : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.poynting_vector() >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> #or + >>> # or >>> op.inputs.fields_containerA(my_fields_containerA) - """ return self._fields_containerA @property def fields_containerB(self): - """Allows to connect fields_containerB input to the operator + """Allows to connect fields_containerB input to the operator. Parameters ---------- - my_fields_containerB : FieldsContainer, + my_fields_containerB : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.poynting_vector() >>> op.inputs.fields_containerB.connect(my_fields_containerB) - >>> #or + >>> # or >>> op.inputs.fields_containerB(my_fields_containerB) - """ return self._fields_containerB @property def fields_containerC(self): - """Allows to connect fields_containerC input to the operator + """Allows to connect fields_containerC input to the operator. Parameters ---------- - my_fields_containerC : FieldsContainer, + my_fields_containerC : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.poynting_vector() >>> op.inputs.fields_containerC.connect(my_fields_containerC) - >>> #or + >>> # or >>> op.inputs.fields_containerC(my_fields_containerC) - """ return self._fields_containerC @property def fields_containerD(self): - """Allows to connect fields_containerD input to the operator + """Allows to connect fields_containerD input to the operator. 
Parameters ---------- - my_fields_containerD : FieldsContainer, + my_fields_containerD : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.poynting_vector() >>> op.inputs.fields_containerD.connect(my_fields_containerD) - >>> #or + >>> # or >>> op.inputs.fields_containerD(my_fields_containerD) - """ return self._fields_containerD @property - def meshed_region(self): - """Allows to connect meshed_region input to the operator + def abstract_meshed_region(self): + """Allows to connect abstract_meshed_region input to the operator. - - pindoc: the mesh region in this pin have to be boundary or skin mesh + The mesh region in this pin have to be + boundary or skin mesh Parameters ---------- - my_meshed_region : MeshedRegion, + my_abstract_meshed_region : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.poynting_vector() - >>> op.inputs.meshed_region.connect(my_meshed_region) - >>> #or - >>> op.inputs.meshed_region(my_meshed_region) - + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> # or + >>> op.inputs.abstract_meshed_region(my_abstract_meshed_region) """ - return self._meshed_region + return self._abstract_meshed_region @property def int32(self): - """Allows to connect int32 input to the operator + """Allows to connect int32 input to the operator. 
- - pindoc: load step number, if it's specified, the Poynting Vector is computed only on the substeps of this step + Load step number, if it's specified, the + poynting vector is computed only on + the substeps of this step Parameters ---------- - my_int32 : int, + my_int32 : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.poynting_vector() >>> op.inputs.int32.connect(my_int32) - >>> #or + >>> # or >>> op.inputs.int32(my_int32) - """ return self._int32 + class OutputsPoyntingVector(_Outputs): - """Intermediate class used to get outputs from poynting_vector operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.poynting_vector() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + poynting_vector operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.poynting_vector() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(poynting_vector._spec().outputs, op) - self._fields_container = Output(poynting_vector._spec().output_pin(0), 0, op) + self._fields_container = Output(poynting_vector._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.poynting_vector() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/poynting_vector_surface.py b/ansys/dpf/core/operators/result/poynting_vector_surface.py index cf76c40811d..c0e639c2983 100644 --- a/ansys/dpf/core/operators/result/poynting_vector_surface.py +++ b/ansys/dpf/core/operators/result/poynting_vector_surface.py @@ -1,90 +1,167 @@ """ poynting_vector_surface -======================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "result" category -""" class poynting_vector_surface(Operator): """Compute the Poynting Vector surface integral - available inputs: - - fields_containerA (FieldsContainer) - - fields_containerB (FieldsContainer) - - fields_containerC (FieldsContainer) - - fields_containerD (FieldsContainer) - - meshed_region (MeshedRegion) (optional) - - int32 (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.poynting_vector_surface() - - >>> # Make input connections - >>> my_fields_containerA = dpf.FieldsContainer() - >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> my_fields_containerB = dpf.FieldsContainer() - >>> op.inputs.fields_containerB.connect(my_fields_containerB) - >>> my_fields_containerC = dpf.FieldsContainer() - >>> op.inputs.fields_containerC.connect(my_fields_containerC) - >>> my_fields_containerD = 
dpf.FieldsContainer() - >>> op.inputs.fields_containerD.connect(my_fields_containerD) - >>> my_meshed_region = dpf.MeshedRegion() - >>> op.inputs.meshed_region.connect(my_meshed_region) - >>> my_int32 = int() - >>> op.inputs.int32.connect(my_int32) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.poynting_vector_surface(fields_containerA=my_fields_containerA,fields_containerB=my_fields_containerB,fields_containerC=my_fields_containerC,fields_containerD=my_fields_containerD,meshed_region=my_meshed_region,int32=my_int32) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_containerA=None, fields_containerB=None, fields_containerC=None, fields_containerD=None, meshed_region=None, int32=None, config=None, server=None): - super().__init__(name="PoyntingVectorSurface", config = config, server = server) + Parameters + ---------- + fields_containerA : FieldsContainer + fields_containerB : FieldsContainer + fields_containerC : FieldsContainer + fields_containerD : FieldsContainer + abstract_meshed_region : MeshedRegion, optional + The mesh region in this pin have to be + boundary or skin mesh + int32 : int, optional + Load step number, if it's specified, the + poynting vector is computed only on + the substeps of this step + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.poynting_vector_surface() + + >>> # Make input connections + >>> my_fields_containerA = dpf.FieldsContainer() + >>> op.inputs.fields_containerA.connect(my_fields_containerA) + >>> my_fields_containerB = dpf.FieldsContainer() + >>> op.inputs.fields_containerB.connect(my_fields_containerB) + >>> my_fields_containerC = dpf.FieldsContainer() + >>> op.inputs.fields_containerC.connect(my_fields_containerC) + >>> my_fields_containerD = dpf.FieldsContainer() + >>> op.inputs.fields_containerD.connect(my_fields_containerD) + >>> 
my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_int32 = int() + >>> op.inputs.int32.connect(my_int32) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.poynting_vector_surface( + ... fields_containerA=my_fields_containerA, + ... fields_containerB=my_fields_containerB, + ... fields_containerC=my_fields_containerC, + ... fields_containerD=my_fields_containerD, + ... abstract_meshed_region=my_abstract_meshed_region, + ... int32=my_int32, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + fields_containerA=None, + fields_containerB=None, + fields_containerC=None, + fields_containerD=None, + abstract_meshed_region=None, + int32=None, + config=None, + server=None, + ): + super().__init__(name="PoyntingVectorSurface", config=config, server=server) self._inputs = InputsPoyntingVectorSurface(self) self._outputs = OutputsPoyntingVectorSurface(self) - if fields_containerA !=None: + if fields_containerA is not None: self.inputs.fields_containerA.connect(fields_containerA) - if fields_containerB !=None: + if fields_containerB is not None: self.inputs.fields_containerB.connect(fields_containerB) - if fields_containerC !=None: + if fields_containerC is not None: self.inputs.fields_containerC.connect(fields_containerC) - if fields_containerD !=None: + if fields_containerD is not None: self.inputs.fields_containerD.connect(fields_containerD) - if meshed_region !=None: - self.inputs.meshed_region.connect(meshed_region) - if int32 !=None: + if abstract_meshed_region is not None: + self.inputs.abstract_meshed_region.connect(abstract_meshed_region) + if int32 is not None: self.inputs.int32.connect(int32) @staticmethod def _spec(): - spec = Specification(description="""Compute the Poynting Vector surface integral""", - map_input_pin_spec={ - 0 : PinSpecification(name = 
"fields_containerA", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "fields_containerB", type_names=["fields_container"], optional=False, document=""""""), - 2 : PinSpecification(name = "fields_containerC", type_names=["fields_container"], optional=False, document=""""""), - 3 : PinSpecification(name = "fields_containerD", type_names=["fields_container"], optional=False, document=""""""), - 4 : PinSpecification(name = "meshed_region", type_names=["abstract_meshed_region"], optional=True, document="""the mesh region in this pin have to be boundary or skin mesh"""), - 5 : PinSpecification(name = "int32", type_names=["int32"], optional=True, document="""load step number, if it's specified, the Poynting Vector is computed only on the substeps of this step""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Compute the Poynting Vector surface integral""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_containerA", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="fields_containerB", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="fields_containerC", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 3: PinSpecification( + name="fields_containerD", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 4: PinSpecification( + name="abstract_meshed_region", + type_names=["abstract_meshed_region"], + optional=True, + document="""The mesh region in this pin have to be + boundary or skin mesh""", + ), + 5: PinSpecification( + name="int32", + type_names=["int32"], + optional=True, + document="""Load step number, if it's specified, the + poynting vector is computed only on + the 
substeps of this step""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "PoyntingVectorSurface") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="PoyntingVectorSurface", server=server) @property def inputs(self): @@ -92,215 +169,216 @@ def inputs(self): Returns -------- - inputs : InputsPoyntingVectorSurface + inputs : InputsPoyntingVectorSurface """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPoyntingVectorSurface + outputs : OutputsPoyntingVectorSurface """ return super().outputs -#internal name: PoyntingVectorSurface -#scripting name: poynting_vector_surface class InputsPoyntingVectorSurface(_Inputs): - """Intermediate class used to connect user inputs to poynting_vector_surface operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.poynting_vector_surface() - >>> my_fields_containerA = dpf.FieldsContainer() - >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> my_fields_containerB = dpf.FieldsContainer() - >>> op.inputs.fields_containerB.connect(my_fields_containerB) - >>> my_fields_containerC = dpf.FieldsContainer() - >>> op.inputs.fields_containerC.connect(my_fields_containerC) - >>> my_fields_containerD = dpf.FieldsContainer() - 
>>> op.inputs.fields_containerD.connect(my_fields_containerD) - >>> my_meshed_region = dpf.MeshedRegion() - >>> op.inputs.meshed_region.connect(my_meshed_region) - >>> my_int32 = int() - >>> op.inputs.int32.connect(my_int32) + """Intermediate class used to connect user inputs to + poynting_vector_surface operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.poynting_vector_surface() + >>> my_fields_containerA = dpf.FieldsContainer() + >>> op.inputs.fields_containerA.connect(my_fields_containerA) + >>> my_fields_containerB = dpf.FieldsContainer() + >>> op.inputs.fields_containerB.connect(my_fields_containerB) + >>> my_fields_containerC = dpf.FieldsContainer() + >>> op.inputs.fields_containerC.connect(my_fields_containerC) + >>> my_fields_containerD = dpf.FieldsContainer() + >>> op.inputs.fields_containerD.connect(my_fields_containerD) + >>> my_abstract_meshed_region = dpf.MeshedRegion() + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_int32 = int() + >>> op.inputs.int32.connect(my_int32) """ + def __init__(self, op: Operator): super().__init__(poynting_vector_surface._spec().inputs, op) - self._fields_containerA = Input(poynting_vector_surface._spec().input_pin(0), 0, op, -1) + self._fields_containerA = Input( + poynting_vector_surface._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_containerA) - self._fields_containerB = Input(poynting_vector_surface._spec().input_pin(1), 1, op, -1) + self._fields_containerB = Input( + poynting_vector_surface._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._fields_containerB) - self._fields_containerC = Input(poynting_vector_surface._spec().input_pin(2), 2, op, -1) + self._fields_containerC = Input( + poynting_vector_surface._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_containerC) - self._fields_containerD = Input(poynting_vector_surface._spec().input_pin(3), 3, op, -1) + 
self._fields_containerD = Input( + poynting_vector_surface._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._fields_containerD) - self._meshed_region = Input(poynting_vector_surface._spec().input_pin(4), 4, op, -1) - self._inputs.append(self._meshed_region) - self._int32 = Input(poynting_vector_surface._spec().input_pin(5), 5, op, -1) + self._abstract_meshed_region = Input( + poynting_vector_surface._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._abstract_meshed_region) + self._int32 = Input(poynting_vector_surface._spec().input_pin(5), 5, op, -1) self._inputs.append(self._int32) @property def fields_containerA(self): - """Allows to connect fields_containerA input to the operator + """Allows to connect fields_containerA input to the operator. Parameters ---------- - my_fields_containerA : FieldsContainer, + my_fields_containerA : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.poynting_vector_surface() >>> op.inputs.fields_containerA.connect(my_fields_containerA) - >>> #or + >>> # or >>> op.inputs.fields_containerA(my_fields_containerA) - """ return self._fields_containerA @property def fields_containerB(self): - """Allows to connect fields_containerB input to the operator + """Allows to connect fields_containerB input to the operator. Parameters ---------- - my_fields_containerB : FieldsContainer, + my_fields_containerB : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.poynting_vector_surface() >>> op.inputs.fields_containerB.connect(my_fields_containerB) - >>> #or + >>> # or >>> op.inputs.fields_containerB(my_fields_containerB) - """ return self._fields_containerB @property def fields_containerC(self): - """Allows to connect fields_containerC input to the operator + """Allows to connect fields_containerC input to the operator. 
Parameters ---------- - my_fields_containerC : FieldsContainer, + my_fields_containerC : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.poynting_vector_surface() >>> op.inputs.fields_containerC.connect(my_fields_containerC) - >>> #or + >>> # or >>> op.inputs.fields_containerC(my_fields_containerC) - """ return self._fields_containerC @property def fields_containerD(self): - """Allows to connect fields_containerD input to the operator + """Allows to connect fields_containerD input to the operator. Parameters ---------- - my_fields_containerD : FieldsContainer, + my_fields_containerD : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.poynting_vector_surface() >>> op.inputs.fields_containerD.connect(my_fields_containerD) - >>> #or + >>> # or >>> op.inputs.fields_containerD(my_fields_containerD) - """ return self._fields_containerD @property - def meshed_region(self): - """Allows to connect meshed_region input to the operator + def abstract_meshed_region(self): + """Allows to connect abstract_meshed_region input to the operator. - - pindoc: the mesh region in this pin have to be boundary or skin mesh + The mesh region in this pin have to be + boundary or skin mesh Parameters ---------- - my_meshed_region : MeshedRegion, + my_abstract_meshed_region : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.poynting_vector_surface() - >>> op.inputs.meshed_region.connect(my_meshed_region) - >>> #or - >>> op.inputs.meshed_region(my_meshed_region) - + >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> # or + >>> op.inputs.abstract_meshed_region(my_abstract_meshed_region) """ - return self._meshed_region + return self._abstract_meshed_region @property def int32(self): - """Allows to connect int32 input to the operator + """Allows to connect int32 input to the operator. 
- - pindoc: load step number, if it's specified, the Poynting Vector is computed only on the substeps of this step + Load step number, if it's specified, the + poynting vector is computed only on + the substeps of this step Parameters ---------- - my_int32 : int, + my_int32 : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.poynting_vector_surface() >>> op.inputs.int32.connect(my_int32) - >>> #or + >>> # or >>> op.inputs.int32(my_int32) - """ return self._int32 + class OutputsPoyntingVectorSurface(_Outputs): - """Intermediate class used to get outputs from poynting_vector_surface operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.poynting_vector_surface() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + poynting_vector_surface operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.poynting_vector_surface() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(poynting_vector_surface._spec().outputs, op) - self._fields_container = Output(poynting_vector_surface._spec().output_pin(0), 0, op) + self._fields_container = Output( + poynting_vector_surface._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.poynting_vector_surface() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/pres_to_field.py b/ansys/dpf/core/operators/result/pres_to_field.py index 9cc632ffb3d..ddb7c869d93 100644 --- a/ansys/dpf/core/operators/result/pres_to_field.py +++ b/ansys/dpf/core/operators/result/pres_to_field.py @@ -1,60 +1,90 @@ """ pres_to_field -============= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class pres_to_field(Operator): """Read the presol generated file from mapdl. - available inputs: - - filepath (str) + Parameters + ---------- + filepath : str + Filepath + - available outputs: - - field (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.result.pres_to_field() - >>> # Instantiate operator - >>> op = dpf.operators.result.pres_to_field() + >>> # Make input connections + >>> my_filepath = str() + >>> op.inputs.filepath.connect(my_filepath) - >>> # Make input connections - >>> my_filepath = str() - >>> op.inputs.filepath.connect(my_filepath) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.pres_to_field( + ... filepath=my_filepath, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.pres_to_field(filepath=my_filepath) + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, filepath=None, config=None, server=None): - super().__init__(name="PRES_Reader", config = config, server = server) + super().__init__(name="PRES_Reader", config=config, server=server) self._inputs = InputsPresToField(self) self._outputs = OutputsPresToField(self) - if filepath !=None: + if filepath is not None: self.inputs.filepath.connect(filepath) @staticmethod def _spec(): - spec = Specification(description="""Read the presol generated file from mapdl.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "filepath", type_names=["string"], optional=False, document="""filepath""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Read the presol generated file from mapdl.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="filepath", + type_names=["string"], + optional=False, + document="""Filepath""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "PRES_Reader") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="PRES_Reader", server=server) @property def inputs(self): @@ -62,93 +92,89 @@ def inputs(self): Returns -------- - inputs : InputsPresToField + inputs : InputsPresToField """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPresToField + outputs : OutputsPresToField """ return super().outputs -#internal name: PRES_Reader -#scripting name: pres_to_field class InputsPresToField(_Inputs): - """Intermediate class used to connect user inputs to pres_to_field operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.pres_to_field() - >>> my_filepath = str() - >>> op.inputs.filepath.connect(my_filepath) + """Intermediate class used to connect user inputs to + pres_to_field operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.pres_to_field() + >>> my_filepath = str() + >>> op.inputs.filepath.connect(my_filepath) """ + def __init__(self, op: Operator): super().__init__(pres_to_field._spec().inputs, op) - self._filepath = Input(pres_to_field._spec().input_pin(0), 0, op, -1) + self._filepath = Input(pres_to_field._spec().input_pin(0), 0, op, -1) self._inputs.append(self._filepath) @property def filepath(self): - """Allows to connect filepath input to the operator + """Allows to connect filepath input to the operator. 
- - pindoc: filepath + Filepath Parameters ---------- - my_filepath : str, + my_filepath : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.pres_to_field() >>> op.inputs.filepath.connect(my_filepath) - >>> #or + >>> # or >>> op.inputs.filepath(my_filepath) - """ return self._filepath + class OutputsPresToField(_Outputs): - """Intermediate class used to get outputs from pres_to_field operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.pres_to_field() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + pres_to_field operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.pres_to_field() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(pres_to_field._spec().outputs, op) - self._field = Output(pres_to_field._spec().output_pin(0), 0, op) + self._field = Output(pres_to_field._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.pres_to_field() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/result/prns_to_field.py b/ansys/dpf/core/operators/result/prns_to_field.py index 9287016fa16..8e81934cf50 100644 --- a/ansys/dpf/core/operators/result/prns_to_field.py +++ b/ansys/dpf/core/operators/result/prns_to_field.py @@ -1,60 +1,90 @@ """ prns_to_field -============= +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class prns_to_field(Operator): """Read the presol of nodal field generated file from mapdl. - available inputs: - - filepath (str) + Parameters + ---------- + filepath : str + Filepath + - available outputs: - - field (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.result.prns_to_field() - >>> # Instantiate operator - >>> op = dpf.operators.result.prns_to_field() + >>> # Make input connections + >>> my_filepath = str() + >>> op.inputs.filepath.connect(my_filepath) - >>> # Make input connections - >>> my_filepath = str() - >>> op.inputs.filepath.connect(my_filepath) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.prns_to_field( + ... filepath=my_filepath, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.prns_to_field(filepath=my_filepath) + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, filepath=None, config=None, server=None): - super().__init__(name="PRNS_Reader", config = config, server = server) + super().__init__(name="PRNS_Reader", config=config, server=server) self._inputs = InputsPrnsToField(self) self._outputs = OutputsPrnsToField(self) - if filepath !=None: + if filepath is not None: self.inputs.filepath.connect(filepath) @staticmethod def _spec(): - spec = Specification(description="""Read the presol of nodal field generated file from mapdl.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "filepath", type_names=["string"], optional=False, document="""filepath""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Read the presol of nodal field generated file from mapdl.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="filepath", + type_names=["string"], + optional=False, + document="""Filepath""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "PRNS_Reader") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="PRNS_Reader", server=server) @property def inputs(self): @@ -62,93 +92,89 @@ def inputs(self): Returns -------- - inputs : InputsPrnsToField + inputs : InputsPrnsToField """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPrnsToField + outputs : OutputsPrnsToField """ return super().outputs -#internal name: PRNS_Reader -#scripting name: prns_to_field class InputsPrnsToField(_Inputs): - """Intermediate class used to connect user inputs to prns_to_field operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.prns_to_field() - >>> my_filepath = str() - >>> op.inputs.filepath.connect(my_filepath) + """Intermediate class used to connect user inputs to + prns_to_field operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.prns_to_field() + >>> my_filepath = str() + >>> op.inputs.filepath.connect(my_filepath) """ + def __init__(self, op: Operator): super().__init__(prns_to_field._spec().inputs, op) - self._filepath = Input(prns_to_field._spec().input_pin(0), 0, op, -1) + self._filepath = Input(prns_to_field._spec().input_pin(0), 0, op, -1) self._inputs.append(self._filepath) @property def filepath(self): - """Allows to connect filepath input to the operator + """Allows to connect filepath input to the operator. 
- - pindoc: filepath + Filepath Parameters ---------- - my_filepath : str, + my_filepath : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.prns_to_field() >>> op.inputs.filepath.connect(my_filepath) - >>> #or + >>> # or >>> op.inputs.filepath(my_filepath) - """ return self._filepath + class OutputsPrnsToField(_Outputs): - """Intermediate class used to get outputs from prns_to_field operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.prns_to_field() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + prns_to_field operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.prns_to_field() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(prns_to_field._spec().outputs, op) - self._field = Output(prns_to_field._spec().output_pin(0), 0, op) + self._field = Output(prns_to_field._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.prns_to_field() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/result/raw_displacement.py b/ansys/dpf/core/operators/result/raw_displacement.py index 0b8bc83675b..4bfee9530ac 100644 --- a/ansys/dpf/core/operators/result/raw_displacement.py +++ b/ansys/dpf/core/operators/result/raw_displacement.py @@ -1,92 +1,238 @@ """ raw_displacement -================ +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class raw_displacement(Operator): - """Read/compute U vector from the finite element problem KU=F by calling the readers defined by the datasources. - - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.raw_displacement() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> 
op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.raw_displacement(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="UTOT", config = config, server = server) + """Read/compute U vector from the finite element problem KU=F by calling + the readers defined by the datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.raw_displacement() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.raw_displacement( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... 
fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="UTOT", config=config, server=server) self._inputs = InputsRawDisplacement(self) self._outputs = OutputsRawDisplacement(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute U vector from the finite element problem KU=F by calling the readers defined by the datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = 
"mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute U vector from the finite element problem KU=F by calling + the readers defined by the datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints 
or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "UTOT") + def default_config(server=None): + 
"""Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="UTOT", server=server) @property def inputs(self): @@ -94,275 +240,279 @@ def inputs(self): Returns -------- - inputs : InputsRawDisplacement + inputs : InputsRawDisplacement """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsRawDisplacement + outputs : OutputsRawDisplacement """ return super().outputs -#internal name: UTOT -#scripting name: raw_displacement class InputsRawDisplacement(_Inputs): - """Intermediate class used to connect user inputs to raw_displacement operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.raw_displacement() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user 
inputs to + raw_displacement operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.raw_displacement() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(raw_displacement._spec().inputs, op) - self._time_scoping = Input(raw_displacement._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(raw_displacement._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(raw_displacement._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(raw_displacement._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(raw_displacement._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(raw_displacement._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(raw_displacement._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + raw_displacement._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(raw_displacement._spec().input_pin(4), 4, op, -1) + self._data_sources = 
Input(raw_displacement._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(raw_displacement._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + raw_displacement._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(raw_displacement._spec().input_pin(7), 7, op, -1) + self._mesh = Input(raw_displacement._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(raw_displacement._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(raw_displacement._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.raw_displacement() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. 
+ the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.raw_displacement() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.raw_displacement() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.raw_displacement() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.raw_displacement() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.raw_displacement() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.raw_displacement() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.raw_displacement() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsRawDisplacement(_Outputs): - """Intermediate class used to get outputs from raw_displacement operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.raw_displacement() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + raw_displacement operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.raw_displacement() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(raw_displacement._spec().outputs, op) - self._fields_container = Output(raw_displacement._spec().output_pin(0), 0, op) + self._fields_container = Output(raw_displacement._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.raw_displacement() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/raw_reaction_force.py b/ansys/dpf/core/operators/result/raw_reaction_force.py index 7992737d786..a979bff39a2 100644 --- a/ansys/dpf/core/operators/result/raw_reaction_force.py +++ b/ansys/dpf/core/operators/result/raw_reaction_force.py @@ -1,92 +1,238 @@ """ raw_reaction_force -================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class raw_reaction_force(Operator): - """Read/compute F vector from the finite element problem KU=F by calling the readers defined by the datasources. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.raw_reaction_force() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.raw_reaction_force(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="RFTOT", config = config, server = server) + """Read/compute F vector from the finite element problem KU=F by calling + the readers defined by 
the datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.raw_reaction_force() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = 
bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.raw_reaction_force( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="RFTOT", config=config, server=server) self._inputs = InputsRawReactionForce(self) self._outputs = OutputsRawReactionForce(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute F vector from the finite element 
problem KU=F by calling the readers defined by the datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = 
"""Read/compute F vector from the finite element problem KU=F by calling + the readers defined by the datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is 
read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "RFTOT") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="RFTOT", server=server) @property def inputs(self): @@ -94,275 +240,281 @@ def inputs(self): Returns -------- - inputs : InputsRawReactionForce + inputs : InputsRawReactionForce """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsRawReactionForce + outputs : OutputsRawReactionForce """ return super().outputs -#internal name: RFTOT -#scripting name: raw_reaction_force class InputsRawReactionForce(_Inputs): - """Intermediate class used to connect user inputs to raw_reaction_force operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.raw_reaction_force() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> 
op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + raw_reaction_force operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.raw_reaction_force() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(raw_reaction_force._spec().inputs, op) - self._time_scoping = Input(raw_reaction_force._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(raw_reaction_force._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(raw_reaction_force._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(raw_reaction_force._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(raw_reaction_force._spec().input_pin(2), 2, op, -1) + 
self._fields_container = Input( + raw_reaction_force._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(raw_reaction_force._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + raw_reaction_force._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(raw_reaction_force._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(raw_reaction_force._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(raw_reaction_force._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + raw_reaction_force._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(raw_reaction_force._spec().input_pin(7), 7, op, -1) + self._mesh = Input(raw_reaction_force._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(raw_reaction_force._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(raw_reaction_force._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.raw_reaction_force() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.raw_reaction_force() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.raw_reaction_force() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.raw_reaction_force() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.raw_reaction_force() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.raw_reaction_force() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.raw_reaction_force() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.raw_reaction_force() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsRawReactionForce(_Outputs): - """Intermediate class used to get outputs from raw_reaction_force operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.raw_reaction_force() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + raw_reaction_force operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.raw_reaction_force() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(raw_reaction_force._spec().outputs, op) - self._fields_container = Output(raw_reaction_force._spec().output_pin(0), 0, op) + self._fields_container = Output(raw_reaction_force._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.raw_reaction_force() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/reaction_force.py b/ansys/dpf/core/operators/result/reaction_force.py index 5d1b1b2b446..14fbfb33d63 100644 --- a/ansys/dpf/core/operators/result/reaction_force.py +++ b/ansys/dpf/core/operators/result/reaction_force.py @@ -1,92 +1,238 @@ """ reaction_force -============== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class reaction_force(Operator): - """Read/compute nodal reaction forces by calling the readers defined by the datasources. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.reaction_force() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.reaction_force(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="RF", config = config, server = server) + """Read/compute nodal reaction forces by calling the readers defined by + the datasources. 
+ + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.reaction_force() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> 
op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.reaction_force( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="RF", config=config, server=server) self._inputs = InputsReactionForce(self) self._outputs = OutputsReactionForce(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute nodal reaction forces by calling the readers defined by the 
datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute nodal reaction forces by calling 
the readers defined by + the datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done 
and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "RF") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="RF", server=server) @property def inputs(self): @@ -94,275 +240,277 @@ def inputs(self): Returns -------- - inputs : InputsReactionForce + inputs : InputsReactionForce """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsReactionForce + outputs : OutputsReactionForce """ return super().outputs -#internal name: RF -#scripting name: reaction_force class InputsReactionForce(_Inputs): - """Intermediate class used to connect user inputs to reaction_force operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.reaction_force() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> 
op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + reaction_force operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.reaction_force() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(reaction_force._spec().inputs, op) - self._time_scoping = Input(reaction_force._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(reaction_force._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(reaction_force._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(reaction_force._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(reaction_force._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(reaction_force._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - 
self._streams_container = Input(reaction_force._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(reaction_force._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(reaction_force._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(reaction_force._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(reaction_force._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + reaction_force._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(reaction_force._spec().input_pin(7), 7, op, -1) + self._mesh = Input(reaction_force._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(reaction_force._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(reaction_force._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.reaction_force() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.reaction_force() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.reaction_force() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.reaction_force() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.reaction_force() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.reaction_force() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.reaction_force() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.reaction_force() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsReactionForce(_Outputs): - """Intermediate class used to get outputs from reaction_force operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.reaction_force() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + reaction_force operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.reaction_force() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(reaction_force._spec().outputs, op) - self._fields_container = Output(reaction_force._spec().output_pin(0), 0, op) + self._fields_container = Output(reaction_force._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.reaction_force() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/recombine_harmonic_indeces_cyclic.py b/ansys/dpf/core/operators/result/recombine_harmonic_indeces_cyclic.py index 687bdb2ab6e..68fa6c6f663 100644 --- a/ansys/dpf/core/operators/result/recombine_harmonic_indeces_cyclic.py +++ b/ansys/dpf/core/operators/result/recombine_harmonic_indeces_cyclic.py @@ -1,60 +1,95 @@ """ recombine_harmonic_indeces_cyclic -================================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "result" category -""" class recombine_harmonic_indeces_cyclic(Operator): - """Add the fields corresponding to different load steps with the same frequencies to compute the response. + """Add the fields corresponding to different load steps with the same + frequencies to compute the response. 
+ + Parameters + ---------- + fields_container : FieldsContainer - available inputs: - - fields_container (FieldsContainer) - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.result.recombine_harmonic_indeces_cyclic() - >>> # Instantiate operator - >>> op = dpf.operators.result.recombine_harmonic_indeces_cyclic() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.recombine_harmonic_indeces_cyclic( + ... fields_container=my_fields_container, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.recombine_harmonic_indeces_cyclic(fields_container=my_fields_container) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, config=None, server=None): - super().__init__(name="recombine_harmonic_indeces_cyclic", config = config, server = server) + super().__init__( + name="recombine_harmonic_indeces_cyclic", config=config, server=server + ) self._inputs = InputsRecombineHarmonicIndecesCyclic(self) self._outputs = OutputsRecombineHarmonicIndecesCyclic(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) @staticmethod def _spec(): - spec = Specification(description="""Add the fields corresponding to different load steps with the same frequencies to compute the response.""", - 
map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Add the fields corresponding to different load steps with the same + frequencies to compute the response.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "recombine_harmonic_indeces_cyclic") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config( + name="recombine_harmonic_indeces_cyclic", server=server + ) @property def inputs(self): @@ -62,91 +97,91 @@ def inputs(self): Returns -------- - inputs : InputsRecombineHarmonicIndecesCyclic + inputs : InputsRecombineHarmonicIndecesCyclic """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsRecombineHarmonicIndecesCyclic + outputs : OutputsRecombineHarmonicIndecesCyclic """ return super().outputs -#internal name: recombine_harmonic_indeces_cyclic -#scripting name: recombine_harmonic_indeces_cyclic class InputsRecombineHarmonicIndecesCyclic(_Inputs): - """Intermediate class used to connect user inputs to recombine_harmonic_indeces_cyclic operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.recombine_harmonic_indeces_cyclic() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) + """Intermediate class used to connect user inputs to + recombine_harmonic_indeces_cyclic operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.recombine_harmonic_indeces_cyclic() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) """ + def __init__(self, op: Operator): super().__init__(recombine_harmonic_indeces_cyclic._spec().inputs, op) - self._fields_container = Input(recombine_harmonic_indeces_cyclic._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + recombine_harmonic_indeces_cyclic._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.recombine_harmonic_indeces_cyclic() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container + class OutputsRecombineHarmonicIndecesCyclic(_Outputs): - """Intermediate class used to get outputs from recombine_harmonic_indeces_cyclic operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.recombine_harmonic_indeces_cyclic() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + recombine_harmonic_indeces_cyclic operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.recombine_harmonic_indeces_cyclic() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(recombine_harmonic_indeces_cyclic._spec().outputs, op) - self._fields_container = Output(recombine_harmonic_indeces_cyclic._spec().output_pin(0), 0, op) + self._fields_container = Output( + recombine_harmonic_indeces_cyclic._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.recombine_harmonic_indeces_cyclic() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/remove_rigid_body_motion.py b/ansys/dpf/core/operators/result/remove_rigid_body_motion.py index 526bfd02330..30456fd1bda 100644 --- a/ansys/dpf/core/operators/result/remove_rigid_body_motion.py +++ b/ansys/dpf/core/operators/result/remove_rigid_body_motion.py @@ -1,72 +1,124 @@ """ remove_rigid_body_motion -======================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class remove_rigid_body_motion(Operator): - """Removes rigid body mode from a total displacement field by minimization. Use a reference point in order to substract its displacement to the result displacement field. 
- - available inputs: - - field (Field, FieldsContainer) - - reference_node_id (int) (optional) - - mesh (MeshedRegion) (optional) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.remove_rigid_body_motion() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_reference_node_id = int() - >>> op.inputs.reference_node_id.connect(my_reference_node_id) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.remove_rigid_body_motion(field=my_field,reference_node_id=my_reference_node_id,mesh=my_mesh) - - >>> # Get output data - >>> result_field = op.outputs.field()""" - def __init__(self, field=None, reference_node_id=None, mesh=None, config=None, server=None): - super().__init__(name="ExtractRigidBodyMotion", config = config, server = server) + """Removes rigid body mode from a total displacement field by + minimization. Use a reference point in order to substract its + displacement to the result displacement field. + + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + reference_node_id : int, optional + Id of the reference entity (node). 
+ mesh : MeshedRegion, optional + Default is the mesh in the support + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.remove_rigid_body_motion() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_reference_node_id = int() + >>> op.inputs.reference_node_id.connect(my_reference_node_id) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.remove_rigid_body_motion( + ... field=my_field, + ... reference_node_id=my_reference_node_id, + ... mesh=my_mesh, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__( + self, field=None, reference_node_id=None, mesh=None, config=None, server=None + ): + super().__init__(name="ExtractRigidBodyMotion", config=config, server=server) self._inputs = InputsRemoveRigidBodyMotion(self) self._outputs = OutputsRemoveRigidBodyMotion(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if reference_node_id !=None: + if reference_node_id is not None: self.inputs.reference_node_id.connect(reference_node_id) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Removes rigid body mode from a total displacement field by minimization. 
Use a reference point in order to substract its displacement to the result displacement field.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "reference_node_id", type_names=["int32"], optional=True, document="""Id of the reference entity (node)."""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""default is the mesh in the support""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Removes rigid body mode from a total displacement field by + minimization. Use a reference point in order to substract + its displacement to the result displacement field.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="reference_node_id", + type_names=["int32"], + optional=True, + document="""Id of the reference entity (node).""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""Default is the mesh in the support""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ExtractRigidBodyMotion") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ExtractRigidBodyMotion", server=server) @property def inputs(self): @@ -74,145 +126,140 @@ def inputs(self): Returns -------- - inputs : InputsRemoveRigidBodyMotion + inputs : InputsRemoveRigidBodyMotion """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsRemoveRigidBodyMotion + outputs : OutputsRemoveRigidBodyMotion """ return super().outputs -#internal name: ExtractRigidBodyMotion -#scripting name: remove_rigid_body_motion class InputsRemoveRigidBodyMotion(_Inputs): - """Intermediate class used to connect user inputs to remove_rigid_body_motion operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.remove_rigid_body_motion() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_reference_node_id = int() - >>> op.inputs.reference_node_id.connect(my_reference_node_id) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + remove_rigid_body_motion operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.remove_rigid_body_motion() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_reference_node_id = int() + >>> op.inputs.reference_node_id.connect(my_reference_node_id) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(remove_rigid_body_motion._spec().inputs, op) - self._field = Input(remove_rigid_body_motion._spec().input_pin(0), 0, op, -1) + self._field = Input(remove_rigid_body_motion._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._reference_node_id = Input(remove_rigid_body_motion._spec().input_pin(1), 1, op, -1) + self._reference_node_id = Input( + remove_rigid_body_motion._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._reference_node_id) - self._mesh = Input(remove_rigid_body_motion._spec().input_pin(7), 7, op, -1) + self._mesh = Input(remove_rigid_body_motion._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.remove_rigid_body_motion() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def reference_node_id(self): - """Allows to connect reference_node_id input to the operator + """Allows to connect reference_node_id input to the operator. - - pindoc: Id of the reference entity (node). + Id of the reference entity (node). 
Parameters ---------- - my_reference_node_id : int, + my_reference_node_id : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.remove_rigid_body_motion() >>> op.inputs.reference_node_id.connect(my_reference_node_id) - >>> #or + >>> # or >>> op.inputs.reference_node_id(my_reference_node_id) - """ return self._reference_node_id @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: default is the mesh in the support + Default is the mesh in the support Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.remove_rigid_body_motion() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsRemoveRigidBodyMotion(_Outputs): - """Intermediate class used to get outputs from remove_rigid_body_motion operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.remove_rigid_body_motion() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + remove_rigid_body_motion operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.remove_rigid_body_motion() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(remove_rigid_body_motion._spec().outputs, op) - self._field = Output(remove_rigid_body_motion._spec().output_pin(0), 0, op) + self._field = Output(remove_rigid_body_motion._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.remove_rigid_body_motion() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/result/remove_rigid_body_motion_fc.py b/ansys/dpf/core/operators/result/remove_rigid_body_motion_fc.py index 5c969535b14..751dd62e50d 100644 --- a/ansys/dpf/core/operators/result/remove_rigid_body_motion_fc.py +++ b/ansys/dpf/core/operators/result/remove_rigid_body_motion_fc.py @@ -1,72 +1,129 @@ """ remove_rigid_body_motion_fc -=========================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class remove_rigid_body_motion_fc(Operator): - """Removes rigid body mode from a total displacement field by minimization. Use a reference point in order to substract its displacement to the result displacement field. 
- - available inputs: - - fields_container (FieldsContainer) - - reference_node_id (int) (optional) - - mesh (MeshedRegion) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.remove_rigid_body_motion_fc() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_reference_node_id = int() - >>> op.inputs.reference_node_id.connect(my_reference_node_id) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.remove_rigid_body_motion_fc(fields_container=my_fields_container,reference_node_id=my_reference_node_id,mesh=my_mesh) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, reference_node_id=None, mesh=None, config=None, server=None): - super().__init__(name="ExtractRigidBodyMotion_fc", config = config, server = server) + """Removes rigid body mode from a total displacement field by + minimization. Use a reference point in order to substract its + displacement to the result displacement field. + + Parameters + ---------- + fields_container : FieldsContainer + Field or fields container with only one field + is expected + reference_node_id : int, optional + Id of the reference entity (node). 
+ mesh : MeshedRegion, optional + Default is the mesh in the support + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.remove_rigid_body_motion_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_reference_node_id = int() + >>> op.inputs.reference_node_id.connect(my_reference_node_id) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.remove_rigid_body_motion_fc( + ... fields_container=my_fields_container, + ... reference_node_id=my_reference_node_id, + ... mesh=my_mesh, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + fields_container=None, + reference_node_id=None, + mesh=None, + config=None, + server=None, + ): + super().__init__(name="ExtractRigidBodyMotion_fc", config=config, server=server) self._inputs = InputsRemoveRigidBodyMotionFc(self) self._outputs = OutputsRemoveRigidBodyMotionFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if reference_node_id !=None: + if reference_node_id is not None: self.inputs.reference_node_id.connect(reference_node_id) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Removes rigid body mode from a total displacement field by minimization. 
Use a reference point in order to substract its displacement to the result displacement field.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "reference_node_id", type_names=["int32"], optional=True, document="""Id of the reference entity (node)."""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""default is the mesh in the support""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Removes rigid body mode from a total displacement field by + minimization. Use a reference point in order to substract + its displacement to the result displacement field.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="reference_node_id", + type_names=["int32"], + optional=True, + document="""Id of the reference entity (node).""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""Default is the mesh in the support""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ExtractRigidBodyMotion_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ExtractRigidBodyMotion_fc", server=server) @property def inputs(self): @@ -74,145 +131,144 @@ def inputs(self): Returns -------- - inputs : InputsRemoveRigidBodyMotionFc + inputs : InputsRemoveRigidBodyMotionFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsRemoveRigidBodyMotionFc + outputs : OutputsRemoveRigidBodyMotionFc """ return super().outputs -#internal name: ExtractRigidBodyMotion_fc -#scripting name: remove_rigid_body_motion_fc class InputsRemoveRigidBodyMotionFc(_Inputs): - """Intermediate class used to connect user inputs to remove_rigid_body_motion_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.remove_rigid_body_motion_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_reference_node_id = int() - >>> op.inputs.reference_node_id.connect(my_reference_node_id) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + remove_rigid_body_motion_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.remove_rigid_body_motion_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_reference_node_id = int() + >>> op.inputs.reference_node_id.connect(my_reference_node_id) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(remove_rigid_body_motion_fc._spec().inputs, op) - self._fields_container = Input(remove_rigid_body_motion_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + remove_rigid_body_motion_fc._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._reference_node_id = Input(remove_rigid_body_motion_fc._spec().input_pin(1), 1, op, -1) + self._reference_node_id = Input( + remove_rigid_body_motion_fc._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._reference_node_id) - self._mesh = Input(remove_rigid_body_motion_fc._spec().input_pin(7), 7, op, -1) + self._mesh = Input(remove_rigid_body_motion_fc._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.remove_rigid_body_motion_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def reference_node_id(self): - """Allows to connect reference_node_id input to the operator + """Allows to connect reference_node_id input to the operator. - - pindoc: Id of the reference entity (node). + Id of the reference entity (node). Parameters ---------- - my_reference_node_id : int, + my_reference_node_id : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.remove_rigid_body_motion_fc() >>> op.inputs.reference_node_id.connect(my_reference_node_id) - >>> #or + >>> # or >>> op.inputs.reference_node_id(my_reference_node_id) - """ return self._reference_node_id @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: default is the mesh in the support + Default is the mesh in the support Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.remove_rigid_body_motion_fc() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsRemoveRigidBodyMotionFc(_Outputs): - """Intermediate class used to get outputs from remove_rigid_body_motion_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.remove_rigid_body_motion_fc() - >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + remove_rigid_body_motion_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.remove_rigid_body_motion_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(remove_rigid_body_motion_fc._spec().outputs, op) - self._fields_container = Output(remove_rigid_body_motion_fc._spec().output_pin(0), 0, op) + self._fields_container = Output( + remove_rigid_body_motion_fc._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.remove_rigid_body_motion_fc() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/rigid_transformation.py b/ansys/dpf/core/operators/result/rigid_transformation.py index ad094c5d857..5b9f45ad76d 100644 --- a/ansys/dpf/core/operators/result/rigid_transformation.py +++ b/ansys/dpf/core/operators/result/rigid_transformation.py @@ -1,66 +1,111 @@ """ rigid_transformation -==================== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class rigid_transformation(Operator): """Extracts rigid body motions from a displacement in input. - available inputs: - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) + Parameters + ---------- + streams_container : StreamsContainer, optional + Streams (result file container) (optional) + data_sources : DataSources + If the stream is null then we need to get the + file path from the data sources + - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.result.rigid_transformation() - >>> # Instantiate operator - >>> op = dpf.operators.result.rigid_transformation() + >>> # Make input connections + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) - >>> # Make input connections - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.rigid_transformation( + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.rigid_transformation(streams_container=my_streams_container,data_sources=my_data_sources) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, streams_container=None, data_sources=None, config=None, server=None): - super().__init__(name="RigidTransformationProvider", config = config, server = server) + def __init__( + self, streams_container=None, data_sources=None, config=None, server=None + ): + super().__init__( + name="RigidTransformationProvider", config=config, server=server + ) self._inputs = InputsRigidTransformation(self) self._outputs = OutputsRigidTransformation(self) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) @staticmethod def _spec(): - spec = Specification(description="""Extracts rigid body motions from a displacement in input.""", - map_input_pin_spec={ - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""streams (result file container) (optional)"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""if the stream is null then we need to get the file path from the data sources""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Extracts rigid body motions from a displacement in input.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Streams (result file container) 
(optional)""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""If the stream is null then we need to get the + file path from the data sources""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "RigidTransformationProvider") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config( + name="RigidTransformationProvider", server=server + ) @property def inputs(self): @@ -68,119 +113,118 @@ def inputs(self): Returns -------- - inputs : InputsRigidTransformation + inputs : InputsRigidTransformation """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsRigidTransformation + outputs : OutputsRigidTransformation """ return super().outputs -#internal name: RigidTransformationProvider -#scripting name: rigid_transformation class InputsRigidTransformation(_Inputs): - """Intermediate class used to connect user inputs to rigid_transformation operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.rigid_transformation() - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> 
op.inputs.data_sources.connect(my_data_sources) + """Intermediate class used to connect user inputs to + rigid_transformation operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.rigid_transformation() + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) """ + def __init__(self, op: Operator): super().__init__(rigid_transformation._spec().inputs, op) - self._streams_container = Input(rigid_transformation._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + rigid_transformation._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(rigid_transformation._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(rigid_transformation._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: streams (result file container) (optional) + Streams (result file container) (optional) Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.rigid_transformation() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: if the stream is null then we need to get the file path from the data sources + If the stream is null then we need to get the + file path from the data sources Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.rigid_transformation() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources + class OutputsRigidTransformation(_Outputs): - """Intermediate class used to get outputs from rigid_transformation operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.rigid_transformation() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + rigid_transformation operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.rigid_transformation() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(rigid_transformation._spec().outputs, op) - self._fields_container = Output(rigid_transformation._spec().output_pin(0), 0, op) + self._fields_container = Output( + rigid_transformation._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.rigid_transformation() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/run.py b/ansys/dpf/core/operators/result/run.py index b3db7be2643..73cc0f6c306 100644 --- a/ansys/dpf/core/operators/result/run.py +++ b/ansys/dpf/core/operators/result/run.py @@ -1,78 +1,139 @@ """ run -=== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class run(Operator): - """Solve in mapdl a dat/inp file and returns a datasources with the rst file. 
- - available inputs: - - mapdl_exe_path (str) (optional) - - working_dir (str) (optional) - - number_of_processes (int) (optional) - - data_sources (DataSources) - - available outputs: - - data_sources (DataSources) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.run() - - >>> # Make input connections - >>> my_mapdl_exe_path = str() - >>> op.inputs.mapdl_exe_path.connect(my_mapdl_exe_path) - >>> my_working_dir = str() - >>> op.inputs.working_dir.connect(my_working_dir) - >>> my_number_of_processes = int() - >>> op.inputs.number_of_processes.connect(my_number_of_processes) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.run(mapdl_exe_path=my_mapdl_exe_path,working_dir=my_working_dir,number_of_processes=my_number_of_processes,data_sources=my_data_sources) - - >>> # Get output data - >>> result_data_sources = op.outputs.data_sources()""" - def __init__(self, mapdl_exe_path=None, working_dir=None, number_of_processes=None, data_sources=None, config=None, server=None): - super().__init__(name="mapdl::run", config = config, server = server) + """Solve in mapdl a dat/inp file and returns a datasources with the rst + file. + + Parameters + ---------- + mapdl_exe_path : str, optional + working_dir : str, optional + number_of_processes : int, optional + Set the number of mpi processes used for + resolution (default is 2) + data_sources : DataSources + Data sources containing the input file. 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.run() + + >>> # Make input connections + >>> my_mapdl_exe_path = str() + >>> op.inputs.mapdl_exe_path.connect(my_mapdl_exe_path) + >>> my_working_dir = str() + >>> op.inputs.working_dir.connect(my_working_dir) + >>> my_number_of_processes = int() + >>> op.inputs.number_of_processes.connect(my_number_of_processes) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.run( + ... mapdl_exe_path=my_mapdl_exe_path, + ... working_dir=my_working_dir, + ... number_of_processes=my_number_of_processes, + ... data_sources=my_data_sources, + ... ) + + >>> # Get output data + >>> result_data_sources = op.outputs.data_sources() + """ + + def __init__( + self, + mapdl_exe_path=None, + working_dir=None, + number_of_processes=None, + data_sources=None, + config=None, + server=None, + ): + super().__init__(name="mapdl::run", config=config, server=server) self._inputs = InputsRun(self) self._outputs = OutputsRun(self) - if mapdl_exe_path !=None: + if mapdl_exe_path is not None: self.inputs.mapdl_exe_path.connect(mapdl_exe_path) - if working_dir !=None: + if working_dir is not None: self.inputs.working_dir.connect(working_dir) - if number_of_processes !=None: + if number_of_processes is not None: self.inputs.number_of_processes.connect(number_of_processes) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) @staticmethod def _spec(): - spec = Specification(description="""Solve in mapdl a dat/inp file and returns a datasources with the rst file.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "mapdl_exe_path", type_names=["string"], optional=True, document=""""""), - 1 : PinSpecification(name = "working_dir", type_names=["string"], optional=True, document=""""""), - 
2 : PinSpecification(name = "number_of_processes", type_names=["int32"], optional=True, document="""Set the number of MPI processes used for resolution (default is 2)"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""data sources containing the input file.""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""""")}) + description = """Solve in mapdl a dat/inp file and returns a datasources with the rst + file.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="mapdl_exe_path", + type_names=["string"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="working_dir", + type_names=["string"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="number_of_processes", + type_names=["int32"], + optional=True, + document="""Set the number of mpi processes used for + resolution (default is 2)""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Data sources containing the input file.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mapdl::run") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="mapdl::run", server=server) @property def inputs(self): @@ -80,167 +141,158 @@ def inputs(self): Returns -------- - inputs : InputsRun + inputs : InputsRun """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsRun + outputs : OutputsRun """ return super().outputs -#internal name: mapdl::run -#scripting name: run class InputsRun(_Inputs): - """Intermediate class used to connect user inputs to run operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.run() - >>> my_mapdl_exe_path = str() - >>> op.inputs.mapdl_exe_path.connect(my_mapdl_exe_path) - >>> my_working_dir = str() - >>> op.inputs.working_dir.connect(my_working_dir) - >>> my_number_of_processes = int() - >>> op.inputs.number_of_processes.connect(my_number_of_processes) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + """Intermediate class used to connect user inputs to + run operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.run() + >>> my_mapdl_exe_path = str() + >>> op.inputs.mapdl_exe_path.connect(my_mapdl_exe_path) + >>> my_working_dir = str() + >>> op.inputs.working_dir.connect(my_working_dir) + >>> my_number_of_processes = int() + >>> op.inputs.number_of_processes.connect(my_number_of_processes) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) """ + def __init__(self, op: Operator): super().__init__(run._spec().inputs, op) - self._mapdl_exe_path = Input(run._spec().input_pin(0), 0, op, -1) + self._mapdl_exe_path = Input(run._spec().input_pin(0), 0, op, -1) self._inputs.append(self._mapdl_exe_path) - self._working_dir = Input(run._spec().input_pin(1), 1, op, -1) + self._working_dir = Input(run._spec().input_pin(1), 1, op, -1) self._inputs.append(self._working_dir) - self._number_of_processes = Input(run._spec().input_pin(2), 2, op, -1) + self._number_of_processes = Input(run._spec().input_pin(2), 2, op, -1) self._inputs.append(self._number_of_processes) - self._data_sources = Input(run._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(run._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) @property def mapdl_exe_path(self): - """Allows to connect mapdl_exe_path input to the operator + """Allows to connect mapdl_exe_path input to the operator. Parameters ---------- - my_mapdl_exe_path : str, + my_mapdl_exe_path : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.run() >>> op.inputs.mapdl_exe_path.connect(my_mapdl_exe_path) - >>> #or + >>> # or >>> op.inputs.mapdl_exe_path(my_mapdl_exe_path) - """ return self._mapdl_exe_path @property def working_dir(self): - """Allows to connect working_dir input to the operator + """Allows to connect working_dir input to the operator. 
Parameters ---------- - my_working_dir : str, + my_working_dir : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.run() >>> op.inputs.working_dir.connect(my_working_dir) - >>> #or + >>> # or >>> op.inputs.working_dir(my_working_dir) - """ return self._working_dir @property def number_of_processes(self): - """Allows to connect number_of_processes input to the operator + """Allows to connect number_of_processes input to the operator. - - pindoc: Set the number of MPI processes used for resolution (default is 2) + Set the number of mpi processes used for + resolution (default is 2) Parameters ---------- - my_number_of_processes : int, + my_number_of_processes : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.run() >>> op.inputs.number_of_processes.connect(my_number_of_processes) - >>> #or + >>> # or >>> op.inputs.number_of_processes(my_number_of_processes) - """ return self._number_of_processes @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: data sources containing the input file. + Data sources containing the input file. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.run() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources + class OutputsRun(_Outputs): - """Intermediate class used to get outputs from run operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.run() - >>> # Connect inputs : op.inputs. ... - >>> result_data_sources = op.outputs.data_sources() + """Intermediate class used to get outputs from + run operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.run() + >>> # Connect inputs : op.inputs. ... + >>> result_data_sources = op.outputs.data_sources() """ + def __init__(self, op: Operator): super().__init__(run._spec().outputs, op) - self._data_sources = Output(run._spec().output_pin(0), 0, op) + self._data_sources = Output(run._spec().output_pin(0), 0, op) self._outputs.append(self._data_sources) @property def data_sources(self): """Allows to get data_sources output of the operator - Returns ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.run() >>> # Connect inputs : op.inputs. ... - >>> result_data_sources = op.outputs.data_sources() - """ + >>> result_data_sources = op.outputs.data_sources() + """ # noqa: E501 return self._data_sources - diff --git a/ansys/dpf/core/operators/result/smisc.py b/ansys/dpf/core/operators/result/smisc.py index 6d89eaced90..3cf5e261f62 100644 --- a/ansys/dpf/core/operators/result/smisc.py +++ b/ansys/dpf/core/operators/result/smisc.py @@ -1,90 +1,192 @@ """ smisc -===== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class smisc(Operator): """Read SMISC results from the rst file. 
- available inputs: - - time_scoping (Scoping, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping, list) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - mesh (MeshedRegion) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.smisc() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.smisc(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,fields_container=my_fields_container,streams_container=my_streams_container,data_sources=my_data_sources,mesh=my_mesh) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, fields_container=None, streams_container=None, data_sources=None, mesh=None, config=None, server=None): - super().__init__(name="mapdl::smisc", config = config, server = server) + Parameters + ---------- + time_scoping : Scoping, optional + mesh_scoping : ScopingsContainer or Scoping, optional + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer or Stream, 
optional + Streams containing the result file. + data_sources : DataSources + Data sources containing the result file. + mesh : MeshedRegion, optional + item_index : int + Index of requested item. + num_components : int, optional + Number of components for the requested item. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.smisc() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_item_index = int() + >>> op.inputs.item_index.connect(my_item_index) + >>> my_num_components = int() + >>> op.inputs.num_components.connect(my_num_components) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.smisc( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... mesh=my_mesh, + ... item_index=my_item_index, + ... num_components=my_num_components, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + mesh=None, + item_index=None, + num_components=None, + config=None, + server=None, + ): + super().__init__(name="mapdl::smisc", config=config, server=server) self._inputs = InputsSmisc(self) self._outputs = OutputsSmisc(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) + if item_index is not None: + self.inputs.item_index.connect(item_index) + if num_components is not None: + self.inputs.num_components.connect(num_components) @staticmethod def _spec(): - spec = Specification(description="""Read SMISC results from the rst file.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","vector"], optional=True, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping","vector"], optional=True, document=""""""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], optional=True, document="""Streams containing the result file."""), - 4 : PinSpecification(name = 
"data_sources", type_names=["data_sources"], optional=False, document="""data sources containing the result file."""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""FieldsContainer filled in""")}) + description = """Read SMISC results from the rst file.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping", "vector"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container", "stream"], + optional=True, + document="""Streams containing the result file.""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Data sources containing the result file.""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + 10: PinSpecification( + name="item_index", + type_names=["int32"], + optional=False, + document="""Index of requested item.""", + ), + 11: PinSpecification( + name="num_components", + type_names=["int32"], + optional=True, + document="""Number of components for the requested item.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Fieldscontainer filled in""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return 
Operator.default_config(name = "mapdl::smisc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="mapdl::smisc", server=server) @property def inputs(self): @@ -92,219 +194,252 @@ def inputs(self): Returns -------- - inputs : InputsSmisc + inputs : InputsSmisc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsSmisc + outputs : OutputsSmisc """ return super().outputs -#internal name: mapdl::smisc -#scripting name: smisc class InputsSmisc(_Inputs): - """Intermediate class used to connect user inputs to smisc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.smisc() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + smisc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.smisc() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_item_index = int() + >>> op.inputs.item_index.connect(my_item_index) + >>> my_num_components = int() + >>> op.inputs.num_components.connect(my_num_components) """ + def __init__(self, op: Operator): super().__init__(smisc._spec().inputs, op) - self._time_scoping = Input(smisc._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(smisc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(smisc._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(smisc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(smisc._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(smisc._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(smisc._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(smisc._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(smisc._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(smisc._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._mesh = Input(smisc._spec().input_pin(7), 7, op, -1) + self._mesh = Input(smisc._spec().input_pin(7), 7, op, -1) 
self._inputs.append(self._mesh) + self._item_index = Input(smisc._spec().input_pin(10), 10, op, -1) + self._inputs.append(self._item_index) + self._num_components = Input(smisc._spec().input_pin(11), 11, op, -1) + self._inputs.append(self._num_components) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. Parameters ---------- - my_time_scoping : Scoping, list, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.smisc() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, list, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.smisc() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.smisc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: Streams containing the result file. + Streams containing the result file. Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.smisc() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: data sources containing the result file. + Data sources containing the result file. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.smisc() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.smisc() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + @property + def item_index(self): + """Allows to connect item_index input to the operator. + + Index of requested item. + + Parameters + ---------- + my_item_index : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.smisc() + >>> op.inputs.item_index.connect(my_item_index) + >>> # or + >>> op.inputs.item_index(my_item_index) + """ + return self._item_index + + @property + def num_components(self): + """Allows to connect num_components input to the operator. + + Number of components for the requested item. + + Parameters + ---------- + my_num_components : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.smisc() + >>> op.inputs.num_components.connect(my_num_components) + >>> # or + >>> op.inputs.num_components(my_num_components) + """ + return self._num_components + + class OutputsSmisc(_Outputs): - """Intermediate class used to get outputs from smisc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.smisc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + smisc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.smisc() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(smisc._spec().outputs, op) - self._fields_container = Output(smisc._spec().output_pin(0), 0, op) + self._fields_container = Output(smisc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: FieldsContainer filled in - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.smisc() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/stiffness_matrix_energy.py b/ansys/dpf/core/operators/result/stiffness_matrix_energy.py index 02d1b97d4a0..2cf87a964f9 100644 --- a/ansys/dpf/core/operators/result/stiffness_matrix_energy.py +++ b/ansys/dpf/core/operators/result/stiffness_matrix_energy.py @@ -1,92 +1,238 @@ """ stiffness_matrix_energy -======================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class stiffness_matrix_energy(Operator): - """Read/compute element energy associated with the stiffness matrix by calling the readers defined by the datasources. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.stiffness_matrix_energy() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.stiffness_matrix_energy(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="ENG_SE", config = config, server = server) + """Read/compute element energy associated with the stiffness matrix by + calling the 
readers defined by the datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.stiffness_matrix_energy() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> 
my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.stiffness_matrix_energy( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="ENG_SE", config=config, server=server) self._inputs = InputsStiffnessMatrixEnergy(self) self._outputs = OutputsStiffnessMatrixEnergy(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = 
Specification(description="""Read/compute element energy associated with the stiffness matrix by calling the readers defined by the datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", 
type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element energy associated with the stiffness matrix by + calling the readers defined by the datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", 
"int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ENG_SE") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ENG_SE", server=server) @property def inputs(self): @@ -94,275 +240,291 @@ def inputs(self): Returns -------- - inputs : InputsStiffnessMatrixEnergy + inputs : InputsStiffnessMatrixEnergy """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsStiffnessMatrixEnergy + outputs : OutputsStiffnessMatrixEnergy """ return super().outputs -#internal name: ENG_SE -#scripting name: stiffness_matrix_energy class InputsStiffnessMatrixEnergy(_Inputs): - """Intermediate class used to connect user inputs to stiffness_matrix_energy operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.stiffness_matrix_energy() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> 
op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + stiffness_matrix_energy operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stiffness_matrix_energy() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(stiffness_matrix_energy._spec().inputs, op) - self._time_scoping = Input(stiffness_matrix_energy._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + stiffness_matrix_energy._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stiffness_matrix_energy._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + 
stiffness_matrix_energy._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(stiffness_matrix_energy._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + stiffness_matrix_energy._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(stiffness_matrix_energy._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + stiffness_matrix_energy._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(stiffness_matrix_energy._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + stiffness_matrix_energy._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(stiffness_matrix_energy._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + stiffness_matrix_energy._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stiffness_matrix_energy._spec().input_pin(7), 7, op, -1) + self._mesh = Input(stiffness_matrix_energy._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(stiffness_matrix_energy._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + stiffness_matrix_energy._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stiffness_matrix_energy() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stiffness_matrix_energy() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stiffness_matrix_energy() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stiffness_matrix_energy() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stiffness_matrix_energy() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stiffness_matrix_energy() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stiffness_matrix_energy() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stiffness_matrix_energy() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsStiffnessMatrixEnergy(_Outputs): - """Intermediate class used to get outputs from stiffness_matrix_energy operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.stiffness_matrix_energy() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + stiffness_matrix_energy operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stiffness_matrix_energy() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(stiffness_matrix_energy._spec().outputs, op) - self._fields_container = Output(stiffness_matrix_energy._spec().output_pin(0), 0, op) + self._fields_container = Output( + stiffness_matrix_energy._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stiffness_matrix_energy() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/stress.py b/ansys/dpf/core/operators/result/stress.py index 49066c895ad..27c76f3b25a 100644 --- a/ansys/dpf/core/operators/result/stress.py +++ b/ansys/dpf/core/operators/result/stress.py @@ -1,98 +1,274 @@ """ stress -====== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class stress(Operator): - """Read/compute element nodal component stresses by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.stress() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.stress(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, requested_location=None, 
config=None, server=None): - super().__init__(name="S", config = config, server = server) + """Read/compute element nodal component stresses by calling the readers + defined by the datasources. Regarding the requested location and + the input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.stress() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> 
op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.stress( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="S", config=config, server=server) self._inputs = InputsStress(self) self._outputs = OutputsStress(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component stresses by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component stresses by calling the readers + defined by the datasources. Regarding the requested + location and the input mesh scoping, the result location + can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "S") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="S", server=server) @property def inputs(self): @@ -100,301 +276,325 @@ def inputs(self): Returns -------- - inputs : InputsStress + inputs : InputsStress """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsStress + outputs : OutputsStress """ return super().outputs -#internal name: S -#scripting name: stress class InputsStress(_Inputs): - """Intermediate class used to connect user inputs to stress operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.stress() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> 
op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + stress operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(stress._spec().inputs, op) - self._time_scoping = Input(stress._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(stress._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stress._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(stress._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(stress._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(stress._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(stress._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(stress._spec().input_pin(3), 3, op, -1) 
self._inputs.append(self._streams_container) - self._data_sources = Input(stress._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(stress._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(stress._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input(stress._spec().input_pin(5), 5, op, -1) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress._spec().input_pin(7), 7, op, -1) + self._mesh = Input(stress._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(stress._spec().input_pin(9), 9, op, -1) + self._requested_location = Input(stress._spec().input_pin(9), 9, op, -1) self._inputs.append(self._requested_location) - self._read_cyclic = Input(stress._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(stress._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) + self._read_beams = Input(stress._spec().input_pin(21), 21, op, -1) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. + + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsStress(_Outputs): - """Intermediate class used to get outputs from stress operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.stress() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + stress operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(stress._spec().outputs, op) - self._fields_container = Output(stress._spec().output_pin(0), 0, op) + self._fields_container = Output(stress._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/stress_X.py b/ansys/dpf/core/operators/result/stress_X.py index 4d3db7b5c11..95b4c40c490 100644 --- a/ansys/dpf/core/operators/result/stress_X.py +++ b/ansys/dpf/core/operators/result/stress_X.py @@ -1,98 +1,257 @@ """ stress_X -======== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class stress_X(Operator): - """Read/compute element nodal component stresses XX normal component (00 component) by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.stress_X() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.stress_X(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, requested_location=None, 
config=None, server=None): - super().__init__(name="SX", config = config, server = server) + """Read/compute element nodal component stresses XX normal component (00 + component) by calling the readers defined by the datasources. + Regarding the requested location and the input mesh scoping, the + result location can be Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location, default is nodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.stress_X() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> 
op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.stress_X( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="SX", config=config, server=server) self._inputs = InputsStressX(self) self._outputs = OutputsStressX(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component stresses XX normal component (00 component) by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location, default is Nodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done 
and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component stresses XX normal component (00 + component) by calling the readers defined by the + datasources. Regarding the requested location and the + input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location, default is nodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "SX") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="SX", server=server) @property def inputs(self): @@ -100,301 +259,299 @@ def inputs(self): Returns -------- - inputs : InputsStressX + inputs : InputsStressX """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsStressX + outputs : OutputsStressX """ return super().outputs -#internal name: SX -#scripting name: stress_X class InputsStressX(_Inputs): - """Intermediate class used to connect user inputs to stress_X operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.stress_X() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + stress_X operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress_X() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(stress_X._spec().inputs, op) - self._time_scoping = Input(stress_X._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(stress_X._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stress_X._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(stress_X._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(stress_X._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(stress_X._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(stress_X._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(stress_X._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(stress_X._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(stress_X._spec().input_pin(4), 4, op, -1) 
self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(stress_X._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input(stress_X._spec().input_pin(5), 5, op, -1) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress_X._spec().input_pin(7), 7, op, -1) + self._mesh = Input(stress_X._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(stress_X._spec().input_pin(9), 9, op, -1) + self._requested_location = Input(stress_X._spec().input_pin(9), 9, op, -1) self._inputs.append(self._requested_location) - self._read_cyclic = Input(stress_X._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(stress_X._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_X() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. 
Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_X() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_X() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_X() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_X() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_X() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
- - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_X() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location, default is Nodal + Requested location, default is nodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_X() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_X() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsStressX(_Outputs): - """Intermediate class used to get outputs from stress_X operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.stress_X() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + stress_X operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress_X() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(stress_X._spec().outputs, op) - self._fields_container = Output(stress_X._spec().output_pin(0), 0, op) + self._fields_container = Output(stress_X._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_X() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/stress_XY.py b/ansys/dpf/core/operators/result/stress_XY.py index 8110259495a..966dff3f14a 100644 --- a/ansys/dpf/core/operators/result/stress_XY.py +++ b/ansys/dpf/core/operators/result/stress_XY.py @@ -1,98 +1,257 @@ """ stress_XY -========= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class stress_XY(Operator): - """Read/compute element nodal component stresses XY shear component (01 component) by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.stress_XY() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.stress_XY(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, requested_location=None, 
config=None, server=None): - super().__init__(name="SXY", config = config, server = server) - self._inputs = InputsStressXY(self) - self._outputs = OutputsStressXY(self) - if time_scoping !=None: + """Read/compute element nodal component stresses XY shear component (01 + component) by calling the readers defined by the datasources. + Regarding the requested location and the input mesh scoping, the + result location can be Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location, default is nodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.stress_XY() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> 
op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.stress_XY( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="SXY", config=config, server=server) + self._inputs = InputsStressXy(self) + self._outputs = OutputsStressXy(self) + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component stresses XY shear component (01 component) by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location, default is Nodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done 
and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component stresses XY shear component (01 + component) by calling the readers defined by the + datasources. Regarding the requested location and the + input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location, default is nodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "SXY") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="SXY", server=server) @property def inputs(self): @@ -100,301 +259,299 @@ def inputs(self): Returns -------- - inputs : InputsStressXY + inputs : InputsStressXy """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsStressXY + outputs : OutputsStressXy """ return super().outputs -#internal name: SXY -#scripting name: stress_XY -class InputsStressXY(_Inputs): - """Intermediate class used to connect user inputs to stress_XY operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.stress_XY() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) +class InputsStressXy(_Inputs): + """Intermediate class used to connect user inputs to + stress_XY operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress_XY() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(stress_XY._spec().inputs, op) - self._time_scoping = Input(stress_XY._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(stress_XY._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stress_XY._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(stress_XY._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(stress_XY._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(stress_XY._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(stress_XY._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(stress_XY._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(stress_XY._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(stress_XY._spec().input_pin(4), 4, op, -1) 
self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(stress_XY._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input(stress_XY._spec().input_pin(5), 5, op, -1) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress_XY._spec().input_pin(7), 7, op, -1) + self._mesh = Input(stress_XY._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(stress_XY._spec().input_pin(9), 9, op, -1) + self._requested_location = Input(stress_XY._spec().input_pin(9), 9, op, -1) self._inputs.append(self._requested_location) - self._read_cyclic = Input(stress_XY._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(stress_XY._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_XY() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. 
Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_XY() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_XY() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_XY() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_XY() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_XY() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
- - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_XY() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location, default is Nodal + Requested location, default is nodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_XY() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_XY() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic -class OutputsStressXY(_Outputs): - """Intermediate class used to get outputs from stress_XY operator - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_XY() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() +class OutputsStressXy(_Outputs): + """Intermediate class used to get outputs from + stress_XY operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress_XY() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(stress_XY._spec().outputs, op) - self._fields_container = Output(stress_XY._spec().output_pin(0), 0, op) + self._fields_container = Output(stress_XY._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_XY() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/stress_XZ.py b/ansys/dpf/core/operators/result/stress_XZ.py index f506a62b091..ee862a7456c 100644 --- a/ansys/dpf/core/operators/result/stress_XZ.py +++ b/ansys/dpf/core/operators/result/stress_XZ.py @@ -1,98 +1,257 @@ """ stress_XZ -========= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class stress_XZ(Operator): - """Read/compute element nodal component stresses XZ shear component (02 component) by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.stress_XZ() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.stress_XZ(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, requested_location=None, 
config=None, server=None): - super().__init__(name="SXZ", config = config, server = server) - self._inputs = InputsStressXZ(self) - self._outputs = OutputsStressXZ(self) - if time_scoping !=None: + """Read/compute element nodal component stresses XZ shear component (02 + component) by calling the readers defined by the datasources. + Regarding the requested location and the input mesh scoping, the + result location can be Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location, default is nodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.stress_XZ() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> 
op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.stress_XZ( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="SXZ", config=config, server=server) + self._inputs = InputsStressXz(self) + self._outputs = OutputsStressXz(self) + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component stresses XZ shear component (02 component) by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location, default is Nodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done 
and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component stresses XZ shear component (02 + component) by calling the readers defined by the + datasources. Regarding the requested location and the + input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location, default is nodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "SXZ") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="SXZ", server=server) @property def inputs(self): @@ -100,301 +259,299 @@ def inputs(self): Returns -------- - inputs : InputsStressXZ + inputs : InputsStressXz """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsStressXZ + outputs : OutputsStressXz """ return super().outputs -#internal name: SXZ -#scripting name: stress_XZ -class InputsStressXZ(_Inputs): - """Intermediate class used to connect user inputs to stress_XZ operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.stress_XZ() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) +class InputsStressXz(_Inputs): + """Intermediate class used to connect user inputs to + stress_XZ operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress_XZ() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(stress_XZ._spec().inputs, op) - self._time_scoping = Input(stress_XZ._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(stress_XZ._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stress_XZ._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(stress_XZ._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(stress_XZ._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(stress_XZ._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(stress_XZ._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(stress_XZ._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(stress_XZ._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(stress_XZ._spec().input_pin(4), 4, op, -1) 
self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(stress_XZ._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input(stress_XZ._spec().input_pin(5), 5, op, -1) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress_XZ._spec().input_pin(7), 7, op, -1) + self._mesh = Input(stress_XZ._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(stress_XZ._spec().input_pin(9), 9, op, -1) + self._requested_location = Input(stress_XZ._spec().input_pin(9), 9, op, -1) self._inputs.append(self._requested_location) - self._read_cyclic = Input(stress_XZ._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(stress_XZ._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_XZ() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. 
Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_XZ() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_XZ() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_XZ() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_XZ() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_XZ() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
- - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_XZ() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location, default is Nodal + Requested location, default is nodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_XZ() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_XZ() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic -class OutputsStressXZ(_Outputs): - """Intermediate class used to get outputs from stress_XZ operator - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_XZ() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() +class OutputsStressXz(_Outputs): + """Intermediate class used to get outputs from + stress_XZ operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress_XZ() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(stress_XZ._spec().outputs, op) - self._fields_container = Output(stress_XZ._spec().output_pin(0), 0, op) + self._fields_container = Output(stress_XZ._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_XZ() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/stress_Y.py b/ansys/dpf/core/operators/result/stress_Y.py index 5a8d0de218c..c0223dcb447 100644 --- a/ansys/dpf/core/operators/result/stress_Y.py +++ b/ansys/dpf/core/operators/result/stress_Y.py @@ -1,98 +1,257 @@ """ stress_Y -======== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class stress_Y(Operator): - """Read/compute element nodal component stresses YY normal component (11 component) by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.stress_Y() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.stress_Y(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, requested_location=None, 
config=None, server=None): - super().__init__(name="SY", config = config, server = server) + """Read/compute element nodal component stresses YY normal component (11 + component) by calling the readers defined by the datasources. + Regarding the requested location and the input mesh scoping, the + result location can be Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location, default is nodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.stress_Y() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> 
op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.stress_Y( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="SY", config=config, server=server) self._inputs = InputsStressY(self) self._outputs = OutputsStressY(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component stresses YY normal component (11 component) by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location, default is Nodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done 
and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component stresses YY normal component (11 + component) by calling the readers defined by the + datasources. Regarding the requested location and the + input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location, default is nodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "SY") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="SY", server=server) @property def inputs(self): @@ -100,301 +259,299 @@ def inputs(self): Returns -------- - inputs : InputsStressY + inputs : InputsStressY """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsStressY + outputs : OutputsStressY """ return super().outputs -#internal name: SY -#scripting name: stress_Y class InputsStressY(_Inputs): - """Intermediate class used to connect user inputs to stress_Y operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.stress_Y() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + stress_Y operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress_Y() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(stress_Y._spec().inputs, op) - self._time_scoping = Input(stress_Y._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(stress_Y._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stress_Y._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(stress_Y._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(stress_Y._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(stress_Y._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(stress_Y._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(stress_Y._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(stress_Y._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(stress_Y._spec().input_pin(4), 4, op, -1) 
self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(stress_Y._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input(stress_Y._spec().input_pin(5), 5, op, -1) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress_Y._spec().input_pin(7), 7, op, -1) + self._mesh = Input(stress_Y._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(stress_Y._spec().input_pin(9), 9, op, -1) + self._requested_location = Input(stress_Y._spec().input_pin(9), 9, op, -1) self._inputs.append(self._requested_location) - self._read_cyclic = Input(stress_Y._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(stress_Y._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_Y() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. 
Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_Y() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_Y() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_Y() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_Y() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_Y() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
- - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_Y() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location, default is Nodal + Requested location, default is nodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_Y() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_Y() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsStressY(_Outputs): - """Intermediate class used to get outputs from stress_Y operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.stress_Y() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + stress_Y operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress_Y() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(stress_Y._spec().outputs, op) - self._fields_container = Output(stress_Y._spec().output_pin(0), 0, op) + self._fields_container = Output(stress_Y._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_Y() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/stress_YZ.py b/ansys/dpf/core/operators/result/stress_YZ.py index b296a4d9376..754a252f36b 100644 --- a/ansys/dpf/core/operators/result/stress_YZ.py +++ b/ansys/dpf/core/operators/result/stress_YZ.py @@ -1,98 +1,257 @@ """ stress_YZ -========= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class stress_YZ(Operator): - """Read/compute element nodal component stresses YZ shear component (12 component) by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.stress_YZ() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.stress_YZ(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, requested_location=None, 
config=None, server=None): - super().__init__(name="SYZ", config = config, server = server) - self._inputs = InputsStressYZ(self) - self._outputs = OutputsStressYZ(self) - if time_scoping !=None: + """Read/compute element nodal component stresses YZ shear component (12 + component) by calling the readers defined by the datasources. + Regarding the requested location and the input mesh scoping, the + result location can be Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location, default is nodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.stress_YZ() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> 
op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.stress_YZ( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="SYZ", config=config, server=server) + self._inputs = InputsStressYz(self) + self._outputs = OutputsStressYz(self) + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component stresses YZ shear component (12 component) by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location, default is Nodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done 
and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component stresses YZ shear component (12 + component) by calling the readers defined by the + datasources. Regarding the requested location and the + input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location, default is nodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "SYZ") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="SYZ", server=server) @property def inputs(self): @@ -100,301 +259,299 @@ def inputs(self): Returns -------- - inputs : InputsStressYZ + inputs : InputsStressYz """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsStressYZ + outputs : OutputsStressYz """ return super().outputs -#internal name: SYZ -#scripting name: stress_YZ -class InputsStressYZ(_Inputs): - """Intermediate class used to connect user inputs to stress_YZ operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.stress_YZ() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) +class InputsStressYz(_Inputs): + """Intermediate class used to connect user inputs to + stress_YZ operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress_YZ() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(stress_YZ._spec().inputs, op) - self._time_scoping = Input(stress_YZ._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(stress_YZ._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stress_YZ._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(stress_YZ._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(stress_YZ._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(stress_YZ._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(stress_YZ._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(stress_YZ._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(stress_YZ._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(stress_YZ._spec().input_pin(4), 4, op, -1) 
self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(stress_YZ._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input(stress_YZ._spec().input_pin(5), 5, op, -1) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress_YZ._spec().input_pin(7), 7, op, -1) + self._mesh = Input(stress_YZ._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(stress_YZ._spec().input_pin(9), 9, op, -1) + self._requested_location = Input(stress_YZ._spec().input_pin(9), 9, op, -1) self._inputs.append(self._requested_location) - self._read_cyclic = Input(stress_YZ._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(stress_YZ._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_YZ() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. 
Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_YZ() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_YZ() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_YZ() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_YZ() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_YZ() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
- - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_YZ() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location, default is Nodal + Requested location, default is nodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_YZ() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_YZ() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic -class OutputsStressYZ(_Outputs): - """Intermediate class used to get outputs from stress_YZ operator - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_YZ() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() +class OutputsStressYz(_Outputs): + """Intermediate class used to get outputs from + stress_YZ operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress_YZ() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(stress_YZ._spec().outputs, op) - self._fields_container = Output(stress_YZ._spec().output_pin(0), 0, op) + self._fields_container = Output(stress_YZ._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_YZ() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/stress_Z.py b/ansys/dpf/core/operators/result/stress_Z.py index 883411d20e1..151e9b438c3 100644 --- a/ansys/dpf/core/operators/result/stress_Z.py +++ b/ansys/dpf/core/operators/result/stress_Z.py @@ -1,98 +1,257 @@ """ stress_Z -======== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class stress_Z(Operator): - """Read/compute element nodal component stresses ZZ normal component (22 component) by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.stress_Z() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.stress_Z(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, requested_location=None, 
config=None, server=None): - super().__init__(name="SZ", config = config, server = server) + """Read/compute element nodal component stresses ZZ normal component (22 + component) by calling the readers defined by the datasources. + Regarding the requested location and the input mesh scoping, the + result location can be Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location, default is nodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.stress_Z() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> 
op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.stress_Z( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="SZ", config=config, server=server) self._inputs = InputsStressZ(self) self._outputs = OutputsStressZ(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component stresses ZZ normal component (22 component) by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location, default is Nodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done 
and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component stresses ZZ normal component (22 + component) by calling the readers defined by the + datasources. Regarding the requested location and the + input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location, default is nodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "SZ") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="SZ", server=server) @property def inputs(self): @@ -100,301 +259,299 @@ def inputs(self): Returns -------- - inputs : InputsStressZ + inputs : InputsStressZ """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsStressZ + outputs : OutputsStressZ """ return super().outputs -#internal name: SZ -#scripting name: stress_Z class InputsStressZ(_Inputs): - """Intermediate class used to connect user inputs to stress_Z operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.stress_Z() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + stress_Z operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress_Z() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(stress_Z._spec().inputs, op) - self._time_scoping = Input(stress_Z._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(stress_Z._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stress_Z._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(stress_Z._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(stress_Z._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(stress_Z._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(stress_Z._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(stress_Z._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(stress_Z._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(stress_Z._spec().input_pin(4), 4, op, -1) 
self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(stress_Z._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input(stress_Z._spec().input_pin(5), 5, op, -1) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress_Z._spec().input_pin(7), 7, op, -1) + self._mesh = Input(stress_Z._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(stress_Z._spec().input_pin(9), 9, op, -1) + self._requested_location = Input(stress_Z._spec().input_pin(9), 9, op, -1) self._inputs.append(self._requested_location) - self._read_cyclic = Input(stress_Z._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(stress_Z._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_Z() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. 
Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_Z() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_Z() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_Z() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_Z() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_Z() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
- - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_Z() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location, default is Nodal + Requested location, default is nodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_Z() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_Z() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsStressZ(_Outputs): - """Intermediate class used to get outputs from stress_Z operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.stress_Z() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + stress_Z operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress_Z() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(stress_Z._spec().outputs, op) - self._fields_container = Output(stress_Z._spec().output_pin(0), 0, op) + self._fields_container = Output(stress_Z._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_Z() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/stress_principal_1.py b/ansys/dpf/core/operators/result/stress_principal_1.py index 1d760f78cb6..3d6e76a6a7a 100644 --- a/ansys/dpf/core/operators/result/stress_principal_1.py +++ b/ansys/dpf/core/operators/result/stress_principal_1.py @@ -1,96 +1,253 @@ """ stress_principal_1 -================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class stress_principal_1(Operator): - """Read/compute element nodal component stresses 1st principal component by calling the readers defined by the datasources and computing its eigen values. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.stress_principal_1() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.stress_principal_1(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - 
super().__init__(name="S1", config = config, server = server) + """Read/compute element nodal component stresses 1st principal component + by calling the readers defined by the datasources and computing + its eigen values. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.stress_principal_1() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + 
>>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.stress_principal_1( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="S1", config=config, server=server) self._inputs = InputsStressPrincipal1(self) self._outputs = OutputsStressPrincipal1(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component stresses 1st principal component by calling the readers defined by the datasources and computing its eigen values.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], 
optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document=""""""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component stresses 1st principal component + by calling the readers defined by the datasources and + computing its eigen values.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + 
optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + 
type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "S1") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="S1", server=server) @property def inputs(self): @@ -98,299 +255,305 @@ def inputs(self): Returns -------- - inputs : InputsStressPrincipal1 + inputs : InputsStressPrincipal1 """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsStressPrincipal1 + outputs : OutputsStressPrincipal1 """ return super().outputs -#internal name: S1 -#scripting name: stress_principal_1 class InputsStressPrincipal1(_Inputs): - """Intermediate class used to connect user inputs to stress_principal_1 operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.stress_principal_1() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> 
op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + stress_principal_1 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress_principal_1() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(stress_principal_1._spec().inputs, op) - self._time_scoping = Input(stress_principal_1._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(stress_principal_1._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stress_principal_1._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(stress_principal_1._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(stress_principal_1._spec().input_pin(2), 2, op, -1) + 
self._fields_container = Input( + stress_principal_1._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(stress_principal_1._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + stress_principal_1._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(stress_principal_1._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(stress_principal_1._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(stress_principal_1._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + stress_principal_1._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress_principal_1._spec().input_pin(7), 7, op, -1) + self._mesh = Input(stress_principal_1._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(stress_principal_1._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + stress_principal_1._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(stress_principal_1._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(stress_principal_1._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_1() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_1() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_1() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_1() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_1() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_1() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_1() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_1() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_1() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsStressPrincipal1(_Outputs): - """Intermediate class used to get outputs from stress_principal_1 operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.stress_principal_1() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + stress_principal_1 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress_principal_1() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(stress_principal_1._spec().outputs, op) - self._fields_container = Output(stress_principal_1._spec().output_pin(0), 0, op) + self._fields_container = Output(stress_principal_1._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_1() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/stress_principal_2.py b/ansys/dpf/core/operators/result/stress_principal_2.py index 51a384d2af1..628c71100cc 100644 --- a/ansys/dpf/core/operators/result/stress_principal_2.py +++ b/ansys/dpf/core/operators/result/stress_principal_2.py @@ -1,96 +1,253 @@ """ stress_principal_2 -================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class stress_principal_2(Operator): - """Read/compute element nodal component stresses 2nd principal component by calling the readers defined by the datasources and computing its eigen values. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.stress_principal_2() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.stress_principal_2(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - 
super().__init__(name="S2", config = config, server = server) + """Read/compute element nodal component stresses 2nd principal component + by calling the readers defined by the datasources and computing + its eigen values. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.stress_principal_2() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + 
>>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.stress_principal_2( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="S2", config=config, server=server) self._inputs = InputsStressPrincipal2(self) self._outputs = OutputsStressPrincipal2(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component stresses 2nd principal component by calling the readers defined by the datasources and computing its eigen values.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], 
optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document=""""""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component stresses 2nd principal component + by calling the readers defined by the datasources and + computing its eigen values.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + 
optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + 
type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "S2") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="S2", server=server) @property def inputs(self): @@ -98,299 +255,305 @@ def inputs(self): Returns -------- - inputs : InputsStressPrincipal2 + inputs : InputsStressPrincipal2 """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsStressPrincipal2 + outputs : OutputsStressPrincipal2 """ return super().outputs -#internal name: S2 -#scripting name: stress_principal_2 class InputsStressPrincipal2(_Inputs): - """Intermediate class used to connect user inputs to stress_principal_2 operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.stress_principal_2() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> 
op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + stress_principal_2 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress_principal_2() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(stress_principal_2._spec().inputs, op) - self._time_scoping = Input(stress_principal_2._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(stress_principal_2._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stress_principal_2._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(stress_principal_2._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(stress_principal_2._spec().input_pin(2), 2, op, -1) + 
self._fields_container = Input( + stress_principal_2._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(stress_principal_2._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + stress_principal_2._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(stress_principal_2._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(stress_principal_2._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(stress_principal_2._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + stress_principal_2._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress_principal_2._spec().input_pin(7), 7, op, -1) + self._mesh = Input(stress_principal_2._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(stress_principal_2._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + stress_principal_2._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(stress_principal_2._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(stress_principal_2._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_2() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_2() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_2() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_2() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_2() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_2() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_2() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_2() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_2() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsStressPrincipal2(_Outputs): - """Intermediate class used to get outputs from stress_principal_2 operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.stress_principal_2() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + stress_principal_2 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress_principal_2() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(stress_principal_2._spec().outputs, op) - self._fields_container = Output(stress_principal_2._spec().output_pin(0), 0, op) + self._fields_container = Output(stress_principal_2._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_2() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/stress_principal_3.py b/ansys/dpf/core/operators/result/stress_principal_3.py index 78dc2a4c2ff..2fdc64213f8 100644 --- a/ansys/dpf/core/operators/result/stress_principal_3.py +++ b/ansys/dpf/core/operators/result/stress_principal_3.py @@ -1,96 +1,253 @@ """ stress_principal_3 -================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class stress_principal_3(Operator): - """Read/compute element nodal component stresses 3rd principal component by calling the readers defined by the datasources and computing its eigen values. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.stress_principal_3() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.stress_principal_3(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - 
super().__init__(name="S3", config = config, server = server) + """Read/compute element nodal component stresses 3rd principal component + by calling the readers defined by the datasources and computing + its eigen values. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.stress_principal_3() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + 
>>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.stress_principal_3( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="S3", config=config, server=server) self._inputs = InputsStressPrincipal3(self) self._outputs = OutputsStressPrincipal3(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component stresses 3rd principal component by calling the readers defined by the datasources and computing its eigen values.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], 
optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document=""""""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component stresses 3rd principal component + by calling the readers defined by the datasources and + computing its eigen values.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + 
optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + 
type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "S3") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="S3", server=server) @property def inputs(self): @@ -98,299 +255,305 @@ def inputs(self): Returns -------- - inputs : InputsStressPrincipal3 + inputs : InputsStressPrincipal3 """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsStressPrincipal3 + outputs : OutputsStressPrincipal3 """ return super().outputs -#internal name: S3 -#scripting name: stress_principal_3 class InputsStressPrincipal3(_Inputs): - """Intermediate class used to connect user inputs to stress_principal_3 operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.stress_principal_3() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> 
op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + stress_principal_3 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress_principal_3() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(stress_principal_3._spec().inputs, op) - self._time_scoping = Input(stress_principal_3._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(stress_principal_3._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stress_principal_3._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(stress_principal_3._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(stress_principal_3._spec().input_pin(2), 2, op, -1) + 
self._fields_container = Input( + stress_principal_3._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(stress_principal_3._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + stress_principal_3._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(stress_principal_3._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(stress_principal_3._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(stress_principal_3._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + stress_principal_3._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress_principal_3._spec().input_pin(7), 7, op, -1) + self._mesh = Input(stress_principal_3._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(stress_principal_3._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + stress_principal_3._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(stress_principal_3._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(stress_principal_3._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_3() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_3() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_3() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_3() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_3() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_3() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_3() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_3() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_3() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsStressPrincipal3(_Outputs): - """Intermediate class used to get outputs from stress_principal_3 operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.stress_principal_3() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + stress_principal_3 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress_principal_3() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(stress_principal_3._spec().outputs, op) - self._fields_container = Output(stress_principal_3._spec().output_pin(0), 0, op) + self._fields_container = Output(stress_principal_3._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_principal_3() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/stress_ratio.py b/ansys/dpf/core/operators/result/stress_ratio.py index f83496d55a6..fd5a3fcc84a 100644 --- a/ansys/dpf/core/operators/result/stress_ratio.py +++ b/ansys/dpf/core/operators/result/stress_ratio.py @@ -1,98 +1,274 @@ """ stress_ratio -============ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class stress_ratio(Operator): - """Read/compute element nodal stress ratio by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.stress_ratio() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.stress_ratio(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="ENL_SRAT", config = config, server = server) + """Read/compute element nodal stress ratio by calling the readers defined + by the datasources. Regarding the requested location and the input + mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.stress_ratio() + + >>> # Make input connections + >>> my_time_scoping = 
dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.stress_ratio( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="ENL_SRAT", config=config, server=server) self._inputs = InputsStressRatio(self) self._outputs = OutputsStressRatio(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal stress ratio by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal stress ratio by calling the readers defined + by the datasources. Regarding the requested location and + the input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ENL_SRAT") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ENL_SRAT", server=server) @property def inputs(self): @@ -100,301 +276,327 @@ def inputs(self): Returns -------- - inputs : InputsStressRatio + inputs : InputsStressRatio """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsStressRatio + outputs : OutputsStressRatio """ return super().outputs -#internal name: ENL_SRAT -#scripting name: stress_ratio class InputsStressRatio(_Inputs): - """Intermediate class used to connect user inputs to stress_ratio operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.stress_ratio() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> 
my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + stress_ratio operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress_ratio() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(stress_ratio._spec().inputs, op) - self._time_scoping = Input(stress_ratio._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(stress_ratio._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(stress_ratio._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(stress_ratio._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(stress_ratio._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(stress_ratio._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(stress_ratio._spec().input_pin(3), 3, op, -1) + 
self._streams_container = Input(stress_ratio._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(stress_ratio._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(stress_ratio._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(stress_ratio._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + stress_ratio._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress_ratio._spec().input_pin(7), 7, op, -1) + self._mesh = Input(stress_ratio._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(stress_ratio._spec().input_pin(9), 9, op, -1) + self._requested_location = Input(stress_ratio._spec().input_pin(9), 9, op, -1) self._inputs.append(self._requested_location) - self._read_cyclic = Input(stress_ratio._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(stress_ratio._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) + self._read_beams = Input(stress_ratio._spec().input_pin(21), 21, op, -1) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_ratio() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_ratio() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_ratio() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_ratio() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_ratio() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_ratio() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_ratio() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_ratio() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_ratio() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. + + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress_ratio() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsStressRatio(_Outputs): - """Intermediate class used to get outputs from stress_ratio operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.stress_ratio() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + stress_ratio operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress_ratio() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(stress_ratio._spec().outputs, op) - self._fields_container = Output(stress_ratio._spec().output_pin(0), 0, op) + self._fields_container = Output(stress_ratio._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_ratio() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/stress_rotation_by_euler_nodes.py b/ansys/dpf/core/operators/result/stress_rotation_by_euler_nodes.py index 933cf01b49c..803295a0e25 100644 --- a/ansys/dpf/core/operators/result/stress_rotation_by_euler_nodes.py +++ b/ansys/dpf/core/operators/result/stress_rotation_by_euler_nodes.py @@ -1,72 +1,131 @@ """ stress_rotation_by_euler_nodes -============================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from mapdlOperatorsCore plugin, from "result" category -""" class stress_rotation_by_euler_nodes(Operator): - """read Euler angles on elements from the rst file and rotate the fields in the fieldsContainer. 
- - available inputs: - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer, Stream) (optional) - - data_sources (DataSources) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.stress_rotation_by_euler_nodes() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.stress_rotation_by_euler_nodes(fields_container=my_fields_container,streams_container=my_streams_container,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, streams_container=None, data_sources=None, config=None, server=None): - super().__init__(name="mapdl::rst::S_rotation_by_euler_nodes", config = config, server = server) + """read Euler angles on elements from the rst file and rotate the fields + in the fieldsContainer. 
+ + Parameters + ---------- + fields_container : FieldsContainer, optional + streams_container : StreamsContainer or Stream or Class + Dataprocessing::Crstfilewrapper, optional + data_sources : DataSources + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.stress_rotation_by_euler_nodes() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.stress_rotation_by_euler_nodes( + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + fields_container=None, + streams_container=None, + data_sources=None, + config=None, + server=None, + ): + super().__init__( + name="mapdl::rst::S_rotation_by_euler_nodes", config=config, server=server + ) self._inputs = InputsStressRotationByEulerNodes(self) self._outputs = OutputsStressRotationByEulerNodes(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) @staticmethod def _spec(): - spec = Specification(description="""read Euler angles on elements from the rst file and rotate the fields in the fieldsContainer.""", - map_input_pin_spec={ - 2 : PinSpecification(name = 
"fields_container", type_names=["fields_container"], optional=True, document=""""""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container","stream"], optional=True, document=""""""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """read Euler angles on elements from the rst file and rotate the fields + in the fieldsContainer.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""""", + ), + 3: PinSpecification( + name="streams_container", + type_names=[ + "streams_container", + "stream", + "class dataProcessing::CRstFileWrapper", + ], + optional=True, + document="""""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mapdl::rst::S_rotation_by_euler_nodes") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config( + name="mapdl::rst::S_rotation_by_euler_nodes", server=server + ) @property def inputs(self): @@ -74,139 +133,140 @@ def inputs(self): Returns -------- - inputs : InputsStressRotationByEulerNodes + inputs : InputsStressRotationByEulerNodes """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsStressRotationByEulerNodes + outputs : OutputsStressRotationByEulerNodes """ return super().outputs -#internal name: mapdl::rst::S_rotation_by_euler_nodes -#scripting name: stress_rotation_by_euler_nodes class InputsStressRotationByEulerNodes(_Inputs): - """Intermediate class used to connect user inputs to stress_rotation_by_euler_nodes operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.stress_rotation_by_euler_nodes() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + """Intermediate class used to connect user inputs to + stress_rotation_by_euler_nodes operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress_rotation_by_euler_nodes() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) """ + def __init__(self, op: Operator): super().__init__(stress_rotation_by_euler_nodes._spec().inputs, op) - self._fields_container = Input(stress_rotation_by_euler_nodes._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + stress_rotation_by_euler_nodes._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(stress_rotation_by_euler_nodes._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + stress_rotation_by_euler_nodes._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(stress_rotation_by_euler_nodes._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + stress_rotation_by_euler_nodes._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_rotation_by_euler_nodes() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. Parameters ---------- - my_streams_container : StreamsContainer, Stream, + my_streams_container : StreamsContainer or Stream or Class + Dataprocessing::Crstfilewrapper Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_rotation_by_euler_nodes() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_rotation_by_euler_nodes() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources + class OutputsStressRotationByEulerNodes(_Outputs): - """Intermediate class used to get outputs from stress_rotation_by_euler_nodes operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.stress_rotation_by_euler_nodes() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + stress_rotation_by_euler_nodes operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress_rotation_by_euler_nodes() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(stress_rotation_by_euler_nodes._spec().outputs, op) - self._fields_container = Output(stress_rotation_by_euler_nodes._spec().output_pin(0), 0, op) + self._fields_container = Output( + stress_rotation_by_euler_nodes._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_rotation_by_euler_nodes() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/stress_von_mises.py b/ansys/dpf/core/operators/result/stress_von_mises.py index 52b20d23530..002f52506d5 100644 --- a/ansys/dpf/core/operators/result/stress_von_mises.py +++ b/ansys/dpf/core/operators/result/stress_von_mises.py @@ -1,96 +1,251 @@ """ stress_von_mises -================ +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class stress_von_mises(Operator): - """Reads/computes element nodal component stresses, average it on nodes (by default) and computes its invariants. - - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.stress_von_mises() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = 
str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.stress_von_mises(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="S_eqv", config = config, server = server) + """Reads/computes element nodal component stresses, average it on nodes + (by default) and computes its invariants. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.stress_von_mises() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = 
dpf.operators.result.stress_von_mises( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="S_eqv", config=config, server=server) self._inputs = InputsStressVonMises(self) self._outputs = OutputsStressVonMises(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Reads/computes element nodal component stresses, average it on nodes (by default) and computes its invariants.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", 
type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document=""""""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Reads/computes element 
nodal component stresses, average it on nodes + (by default) and computes its invariants.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", 
"int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "S_eqv") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="S_eqv", server=server) @property def inputs(self): @@ -98,299 +253,303 @@ def inputs(self): Returns -------- - inputs : InputsStressVonMises + inputs : InputsStressVonMises """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsStressVonMises + outputs : OutputsStressVonMises """ return super().outputs -#internal name: S_eqv -#scripting name: stress_von_mises class InputsStressVonMises(_Inputs): - """Intermediate class used to connect user inputs to stress_von_mises operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.stress_von_mises() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> 
my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + stress_von_mises operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress_von_mises() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(stress_von_mises._spec().inputs, op) - self._time_scoping = Input(stress_von_mises._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(stress_von_mises._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = 
Input(stress_von_mises._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(stress_von_mises._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(stress_von_mises._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(stress_von_mises._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(stress_von_mises._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + stress_von_mises._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(stress_von_mises._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(stress_von_mises._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(stress_von_mises._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + stress_von_mises._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(stress_von_mises._spec().input_pin(7), 7, op, -1) + self._mesh = Input(stress_von_mises._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(stress_von_mises._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + stress_von_mises._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(stress_von_mises._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(stress_von_mises._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_von_mises() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_von_mises() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_von_mises() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_von_mises() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_von_mises() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_von_mises() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_von_mises() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_von_mises() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_von_mises() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsStressVonMises(_Outputs): - """Intermediate class used to get outputs from stress_von_mises operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.stress_von_mises() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + stress_von_mises operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.stress_von_mises() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(stress_von_mises._spec().outputs, op) - self._fields_container = Output(stress_von_mises._spec().output_pin(0), 0, op) + self._fields_container = Output(stress_von_mises._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.stress_von_mises() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/structural_temperature.py b/ansys/dpf/core/operators/result/structural_temperature.py index df0b563656e..fdad3c5c21a 100644 --- a/ansys/dpf/core/operators/result/structural_temperature.py +++ b/ansys/dpf/core/operators/result/structural_temperature.py @@ -1,98 +1,274 @@ """ structural_temperature -====================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class structural_temperature(Operator): - """Read/compute element structural nodal temperatures by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.structural_temperature() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.structural_temperature(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="BFE", config = config, server = server) + """Read/compute element structural nodal temperatures by calling the + readers defined by the datasources. Regarding the requested + location and the input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.structural_temperature() + + >>> # Make input connections + >>> 
my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.structural_temperature( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="BFE", config=config, server=server) self._inputs = InputsStructuralTemperature(self) self._outputs = OutputsStructuralTemperature(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element structural nodal temperatures by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element structural nodal temperatures by calling the + readers defined by the datasources. Regarding the + requested location and the input mesh scoping, the result + location can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "BFE") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="BFE", server=server) @property def inputs(self): @@ -100,301 +276,345 @@ def inputs(self): Returns -------- - inputs : InputsStructuralTemperature + inputs : InputsStructuralTemperature """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsStructuralTemperature + outputs : OutputsStructuralTemperature """ return super().outputs -#internal name: BFE -#scripting name: structural_temperature class InputsStructuralTemperature(_Inputs): - """Intermediate class used to connect user inputs to structural_temperature operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.structural_temperature() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> 
op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + structural_temperature operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.structural_temperature() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(structural_temperature._spec().inputs, op) - self._time_scoping = Input(structural_temperature._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + structural_temperature._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(structural_temperature._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + structural_temperature._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(structural_temperature._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + 
structural_temperature._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(structural_temperature._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + structural_temperature._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(structural_temperature._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + structural_temperature._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(structural_temperature._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + structural_temperature._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(structural_temperature._spec().input_pin(7), 7, op, -1) + self._mesh = Input(structural_temperature._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(structural_temperature._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + structural_temperature._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) - self._read_cyclic = Input(structural_temperature._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + structural_temperature._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) + self._read_beams = Input( + structural_temperature._spec().input_pin(21), 21, op, -1 + ) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.structural_temperature() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.structural_temperature() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.structural_temperature() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.structural_temperature() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.structural_temperature() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.structural_temperature() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.structural_temperature() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. 
- - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.structural_temperature() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.structural_temperature() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. 
+ + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.structural_temperature() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsStructuralTemperature(_Outputs): - """Intermediate class used to get outputs from structural_temperature operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.structural_temperature() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + structural_temperature operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.structural_temperature() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(structural_temperature._spec().outputs, op) - self._fields_container = Output(structural_temperature._spec().output_pin(0), 0, op) + self._fields_container = Output( + structural_temperature._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.structural_temperature() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/swelling_strains.py b/ansys/dpf/core/operators/result/swelling_strains.py new file mode 100644 index 00000000000..f3a50996795 --- /dev/null +++ b/ansys/dpf/core/operators/result/swelling_strains.py @@ -0,0 +1,606 @@ +""" +swelling_strains +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class swelling_strains(Operator): + """Read/compute element nodal swelling strains by calling the readers + defined by the datasources. Regarding the requested location and + the input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.swelling_strains() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> 
op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.swelling_strains( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="ETH_SWL", config=config, server=server) + self._inputs = InputsSwellingStrains(self) + self._outputs = OutputsSwellingStrains(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if mesh_scoping is not None: + self.inputs.mesh_scoping.connect(mesh_scoping) + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + 
self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) + + @staticmethod + def _spec(): + description = """Read/compute element nodal swelling strains by calling the readers + defined by the datasources. Regarding the requested + location and the input mesh scoping, the result location + can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. 
+ + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ETH_SWL", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsSwellingStrains + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsSwellingStrains + """ + return super().outputs + + +class InputsSwellingStrains(_Inputs): + """Intermediate class used to connect user inputs to + swelling_strains operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.swelling_strains() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> 
op.inputs.read_beams.connect(my_read_beams) + """ + + def __init__(self, op: Operator): + super().__init__(swelling_strains._spec().inputs, op) + self._time_scoping = Input(swelling_strains._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._time_scoping) + self._mesh_scoping = Input(swelling_strains._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._mesh_scoping) + self._fields_container = Input(swelling_strains._spec().input_pin(2), 2, op, -1) + self._inputs.append(self._fields_container) + self._streams_container = Input( + swelling_strains._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input(swelling_strains._spec().input_pin(4), 4, op, -1) + self._inputs.append(self._data_sources) + self._bool_rotate_to_global = Input( + swelling_strains._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._bool_rotate_to_global) + self._mesh = Input(swelling_strains._spec().input_pin(7), 7, op, -1) + self._inputs.append(self._mesh) + self._requested_location = Input( + swelling_strains._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._read_cyclic = Input(swelling_strains._spec().input_pin(14), 14, op, -1) + self._inputs.append(self._read_cyclic) + self._read_beams = Input(swelling_strains._spec().input_pin(21), 21, op, -1) + self._inputs.append(self._read_beams) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. 
+ + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.swelling_strains() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def mesh_scoping(self): + """Allows to connect mesh_scoping input to the operator. + + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + + Parameters + ---------- + my_mesh_scoping : ScopingsContainer or Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.swelling_strains() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> # or + >>> op.inputs.mesh_scoping(my_mesh_scoping) + """ + return self._mesh_scoping + + @property + def fields_container(self): + """Allows to connect fields_container input to the operator. + + Fields container already allocated modified + inplace + + Parameters + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.swelling_strains() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> # or + >>> op.inputs.fields_container(my_fields_container) + """ + return self._fields_container + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. 
+ + Result file container allowed to be kept open + to cache data + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.swelling_strains() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Result file path container, used if no + streams are set + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.swelling_strains() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def bool_rotate_to_global(self): + """Allows to connect bool_rotate_to_global input to the operator. + + If true the field is rotated to global + coordinate system (default true) + + Parameters + ---------- + my_bool_rotate_to_global : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.swelling_strains() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> # or + >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) + """ + return self._bool_rotate_to_global + + @property + def mesh(self): + """Allows to connect mesh input to the operator. 
+ + Prevents from reading the mesh in the result + files + + Parameters + ---------- + my_mesh : MeshedRegion or MeshesContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.swelling_strains() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Requested location nodal, elemental or + elementalnodal + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.swelling_strains() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def read_cyclic(self): + """Allows to connect read_cyclic input to the operator. + + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + Parameters + ---------- + my_read_cyclic : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.swelling_strains() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> # or + >>> op.inputs.read_cyclic(my_read_cyclic) + """ + return self._read_cyclic + + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. 
+ + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.swelling_strains() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + +class OutputsSwellingStrains(_Outputs): + """Intermediate class used to get outputs from + swelling_strains operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.swelling_strains() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(swelling_strains._spec().outputs, op) + self._fields_container = Output(swelling_strains._spec().output_pin(0), 0, op) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.swelling_strains() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/temperature.py b/ansys/dpf/core/operators/result/temperature.py index 9e8f3d54c35..3f270213695 100644 --- a/ansys/dpf/core/operators/result/temperature.py +++ b/ansys/dpf/core/operators/result/temperature.py @@ -1,92 +1,238 @@ """ temperature -=========== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class temperature(Operator): - """Read/compute temperature field by calling the readers defined by the datasources. - - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.temperature() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator 
and connect inputs in one line - >>> op = dpf.operators.result.temperature(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="TEMP", config = config, server = server) + """Read/compute temperature field by calling the readers defined by the + datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.temperature() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> 
op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.temperature( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="TEMP", config=config, server=server) self._inputs = InputsTemperature(self) self._outputs = OutputsTemperature(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute temperature field by calling the readers defined by the datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. 
Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute temperature field by calling the readers defined by the + datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + 
document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "TEMP") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="TEMP", server=server) @property def inputs(self): @@ -94,275 +240,275 @@ def inputs(self): Returns -------- - inputs : InputsTemperature + inputs : InputsTemperature """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsTemperature + outputs : OutputsTemperature """ return super().outputs -#internal name: TEMP -#scripting name: temperature class InputsTemperature(_Inputs): - """Intermediate class used to connect user inputs to temperature operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.temperature() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + temperature operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.temperature() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(temperature._spec().inputs, op) - self._time_scoping = Input(temperature._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(temperature._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(temperature._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(temperature._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(temperature._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(temperature._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(temperature._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(temperature._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(temperature._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(temperature._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = 
Input(temperature._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input(temperature._spec().input_pin(5), 5, op, -1) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(temperature._spec().input_pin(7), 7, op, -1) + self._mesh = Input(temperature._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(temperature._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(temperature._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.temperature() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.temperature() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.temperature() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.temperature() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.temperature() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.temperature() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.temperature() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.temperature() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsTemperature(_Outputs): - """Intermediate class used to get outputs from temperature operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.temperature() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + temperature operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.temperature() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(temperature._spec().outputs, op) - self._fields_container = Output(temperature._spec().output_pin(0), 0, op) + self._fields_container = Output(temperature._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.temperature() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/temperature_grad.py b/ansys/dpf/core/operators/result/temperature_grad.py new file mode 100644 index 00000000000..9a529d1a13c --- /dev/null +++ b/ansys/dpf/core/operators/result/temperature_grad.py @@ -0,0 +1,606 @@ +""" +temperature_grad +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class temperature_grad(Operator): + """Read/compute Temperature Gradient by calling the readers defined by + the datasources. Regarding the requested location and the input + mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.temperature_grad() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> 
op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.temperature_grad( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="TG", config=config, server=server) + self._inputs = InputsTemperatureGrad(self) + self._outputs = OutputsTemperatureGrad(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if mesh_scoping is not None: + self.inputs.mesh_scoping.connect(mesh_scoping) + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + 
self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) + + @staticmethod + def _spec(): + description = """Read/compute Temperature Gradient by calling the readers defined by + the datasources. Regarding the requested location and the + input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. 
+ + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="TG", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsTemperatureGrad + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsTemperatureGrad + """ + return super().outputs + + +class InputsTemperatureGrad(_Inputs): + """Intermediate class used to connect user inputs to + temperature_grad operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.temperature_grad() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> 
op.inputs.read_beams.connect(my_read_beams) + """ + + def __init__(self, op: Operator): + super().__init__(temperature_grad._spec().inputs, op) + self._time_scoping = Input(temperature_grad._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._time_scoping) + self._mesh_scoping = Input(temperature_grad._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._mesh_scoping) + self._fields_container = Input(temperature_grad._spec().input_pin(2), 2, op, -1) + self._inputs.append(self._fields_container) + self._streams_container = Input( + temperature_grad._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input(temperature_grad._spec().input_pin(4), 4, op, -1) + self._inputs.append(self._data_sources) + self._bool_rotate_to_global = Input( + temperature_grad._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._bool_rotate_to_global) + self._mesh = Input(temperature_grad._spec().input_pin(7), 7, op, -1) + self._inputs.append(self._mesh) + self._requested_location = Input( + temperature_grad._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._read_cyclic = Input(temperature_grad._spec().input_pin(14), 14, op, -1) + self._inputs.append(self._read_cyclic) + self._read_beams = Input(temperature_grad._spec().input_pin(21), 21, op, -1) + self._inputs.append(self._read_beams) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. 
+ + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.temperature_grad() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def mesh_scoping(self): + """Allows to connect mesh_scoping input to the operator. + + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + + Parameters + ---------- + my_mesh_scoping : ScopingsContainer or Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.temperature_grad() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> # or + >>> op.inputs.mesh_scoping(my_mesh_scoping) + """ + return self._mesh_scoping + + @property + def fields_container(self): + """Allows to connect fields_container input to the operator. + + Fields container already allocated modified + inplace + + Parameters + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.temperature_grad() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> # or + >>> op.inputs.fields_container(my_fields_container) + """ + return self._fields_container + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. 
+ + Result file container allowed to be kept open + to cache data + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.temperature_grad() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Result file path container, used if no + streams are set + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.temperature_grad() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def bool_rotate_to_global(self): + """Allows to connect bool_rotate_to_global input to the operator. + + If true the field is rotated to global + coordinate system (default true) + + Parameters + ---------- + my_bool_rotate_to_global : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.temperature_grad() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> # or + >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) + """ + return self._bool_rotate_to_global + + @property + def mesh(self): + """Allows to connect mesh input to the operator. 
+ + Prevents from reading the mesh in the result + files + + Parameters + ---------- + my_mesh : MeshedRegion or MeshesContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.temperature_grad() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Requested location nodal, elemental or + elementalnodal + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.temperature_grad() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def read_cyclic(self): + """Allows to connect read_cyclic input to the operator. + + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + Parameters + ---------- + my_read_cyclic : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.temperature_grad() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> # or + >>> op.inputs.read_cyclic(my_read_cyclic) + """ + return self._read_cyclic + + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. 
+ + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.temperature_grad() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + +class OutputsTemperatureGrad(_Outputs): + """Intermediate class used to get outputs from + temperature_grad operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.temperature_grad() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(temperature_grad._spec().outputs, op) + self._fields_container = Output(temperature_grad._spec().output_pin(0), 0, op) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.temperature_grad() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/thermal_dissipation_energy.py b/ansys/dpf/core/operators/result/thermal_dissipation_energy.py index b171308f970..9583be9879d 100644 --- a/ansys/dpf/core/operators/result/thermal_dissipation_energy.py +++ b/ansys/dpf/core/operators/result/thermal_dissipation_energy.py @@ -1,92 +1,238 @@ """ thermal_dissipation_energy -========================== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class thermal_dissipation_energy(Operator): - """Read/compute thermal dissipation energy by calling the readers defined by the datasources. - - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.thermal_dissipation_energy() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> 
op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.thermal_dissipation_energy(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="ENG_TH", config = config, server = server) + """Read/compute thermal dissipation energy by calling the readers defined + by the datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.thermal_dissipation_energy() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.thermal_dissipation_energy( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... 
fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="ENG_TH", config=config, server=server) self._inputs = InputsThermalDissipationEnergy(self) self._outputs = OutputsThermalDissipationEnergy(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute thermal dissipation energy by calling the readers defined by the datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = 
"mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute thermal dissipation energy by calling the readers defined + by the datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + 
time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ENG_TH") + def default_config(server=None): + """Returns the 
default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ENG_TH", server=server) @property def inputs(self): @@ -94,275 +240,291 @@ def inputs(self): Returns -------- - inputs : InputsThermalDissipationEnergy + inputs : InputsThermalDissipationEnergy """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsThermalDissipationEnergy + outputs : OutputsThermalDissipationEnergy """ return super().outputs -#internal name: ENG_TH -#scripting name: thermal_dissipation_energy class InputsThermalDissipationEnergy(_Inputs): - """Intermediate class used to connect user inputs to thermal_dissipation_energy operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.thermal_dissipation_energy() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> 
op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + thermal_dissipation_energy operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.thermal_dissipation_energy() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(thermal_dissipation_energy._spec().inputs, op) - self._time_scoping = Input(thermal_dissipation_energy._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input( + thermal_dissipation_energy._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(thermal_dissipation_energy._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input( + thermal_dissipation_energy._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(thermal_dissipation_energy._spec().input_pin(2), 2, op, -1) + self._fields_container = Input( + thermal_dissipation_energy._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._fields_container) - self._streams_container = Input(thermal_dissipation_energy._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + 
thermal_dissipation_energy._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(thermal_dissipation_energy._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + thermal_dissipation_energy._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(thermal_dissipation_energy._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + thermal_dissipation_energy._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(thermal_dissipation_energy._spec().input_pin(7), 7, op, -1) + self._mesh = Input(thermal_dissipation_energy._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(thermal_dissipation_energy._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input( + thermal_dissipation_energy._spec().input_pin(14), 14, op, -1 + ) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thermal_dissipation_energy() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thermal_dissipation_energy() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thermal_dissipation_energy() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thermal_dissipation_energy() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thermal_dissipation_energy() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thermal_dissipation_energy() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thermal_dissipation_energy() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thermal_dissipation_energy() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsThermalDissipationEnergy(_Outputs): - """Intermediate class used to get outputs from thermal_dissipation_energy operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.thermal_dissipation_energy() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + thermal_dissipation_energy operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.thermal_dissipation_energy() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(thermal_dissipation_energy._spec().outputs, op) - self._fields_container = Output(thermal_dissipation_energy._spec().output_pin(0), 0, op) + self._fields_container = Output( + thermal_dissipation_energy._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thermal_dissipation_energy() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/thermal_strain.py b/ansys/dpf/core/operators/result/thermal_strain.py index bc8063c27ef..610ae80bce2 100644 --- a/ansys/dpf/core/operators/result/thermal_strain.py +++ b/ansys/dpf/core/operators/result/thermal_strain.py @@ -1,98 +1,274 @@ """ thermal_strain -============== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class thermal_strain(Operator): - """Read/compute element nodal component thermal strains by calling the readers defined by the datasources. Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - requested_location (str) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.thermal_strain() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.thermal_strain(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, 
requested_location=None, config=None, server=None): - super().__init__(name="ETH", config = config, server = server) + """Read/compute element nodal component thermal strains by calling the + readers defined by the datasources. Regarding the requested + location and the input mesh scoping, the result location can be + Nodal/ElementalNodal/Elemental. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.thermal_strain() + + >>> # Make input connections + >>> 
my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.thermal_strain( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="ETH", config=config, server=server) self._inputs = InputsThermalStrain(self) self._outputs = OutputsThermalStrain(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) @staticmethod def _spec(): - spec = Specification(description="""Read/compute element nodal component thermal strains by calling the readers defined by the datasources. 
Regarding the requested location and the input mesh scoping, the result location can be Nodal/ElementalNodal/Elemental.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""requested location Nodal, Elemental or ElementalNodal"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic 
expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute element nodal component thermal strains by calling the + readers defined by the datasources. Regarding the + requested location and the input mesh scoping, the result + location can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ETH") + def 
default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ETH", server=server) @property def inputs(self): @@ -100,301 +276,327 @@ def inputs(self): Returns -------- - inputs : InputsThermalStrain + inputs : InputsThermalStrain """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsThermalStrain + outputs : OutputsThermalStrain """ return super().outputs -#internal name: ETH -#scripting name: thermal_strain class InputsThermalStrain(_Inputs): - """Intermediate class used to connect user inputs to thermal_strain operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.thermal_strain() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> 
my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + thermal_strain operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.thermal_strain() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) """ + def __init__(self, op: Operator): super().__init__(thermal_strain._spec().inputs, op) - self._time_scoping = Input(thermal_strain._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(thermal_strain._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(thermal_strain._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(thermal_strain._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(thermal_strain._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(thermal_strain._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(thermal_strain._spec().input_pin(3), 3, op, 
-1) + self._streams_container = Input(thermal_strain._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(thermal_strain._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(thermal_strain._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(thermal_strain._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input( + thermal_strain._spec().input_pin(5), 5, op, -1 + ) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(thermal_strain._spec().input_pin(7), 7, op, -1) + self._mesh = Input(thermal_strain._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(thermal_strain._spec().input_pin(9), 9, op, -1) + self._requested_location = Input(thermal_strain._spec().input_pin(9), 9, op, -1) self._inputs.append(self._requested_location) - self._read_cyclic = Input(thermal_strain._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(thermal_strain._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) + self._read_beams = Input(thermal_strain._spec().input_pin(21), 21, op, -1) + self._inputs.append(self._read_beams) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. 
- - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thermal_strain() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thermal_strain() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. 
- - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thermal_strain() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thermal_strain() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thermal_strain() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. 
- - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thermal_strain() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thermal_strain() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. - - pindoc: requested location Nodal, Elemental or ElementalNodal + Requested location nodal, elemental or + elementalnodal Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thermal_strain() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thermal_strain() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. + + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.thermal_strain() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + class OutputsThermalStrain(_Outputs): - """Intermediate class used to get outputs from thermal_strain operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.thermal_strain() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + thermal_strain operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.thermal_strain() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(thermal_strain._spec().outputs, op) - self._fields_container = Output(thermal_strain._spec().output_pin(0), 0, op) + self._fields_container = Output(thermal_strain._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thermal_strain() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/thermal_strains_eqv.py b/ansys/dpf/core/operators/result/thermal_strains_eqv.py new file mode 100644 index 00000000000..c57d8e6d494 --- /dev/null +++ b/ansys/dpf/core/operators/result/thermal_strains_eqv.py @@ -0,0 +1,610 @@ +""" +thermal_strains_eqv +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class thermal_strains_eqv(Operator): + """Read/compute element nodal equivalent component thermal strains by + calling the readers defined by the datasources. Regarding the + requested location and the input mesh scoping, the result location + can be Nodal/ElementalNodal/Elemental. 
+ + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + requested_location : str, optional + Requested location nodal, elemental or + elementalnodal + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + read_beams : bool + Elemental nodal beam results are read if this + pin is set to true (default is false) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.thermal_strains_eqv() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> 
op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.thermal_strains_eqv( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... read_cyclic=my_read_cyclic, + ... read_beams=my_read_beams, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + requested_location=None, + read_cyclic=None, + read_beams=None, + config=None, + server=None, + ): + super().__init__(name="ETH_EQV", config=config, server=server) + self._inputs = InputsThermalStrainsEqv(self) + self._outputs = OutputsThermalStrainsEqv(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if mesh_scoping is not None: + self.inputs.mesh_scoping.connect(mesh_scoping) + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) + if read_beams is not None: + self.inputs.read_beams.connect(read_beams) + + @staticmethod + def _spec(): + description = """Read/compute element nodal equivalent component thermal strains by + calling the readers defined by the datasources. 
Regarding + the requested location and the input mesh scoping, the + result location can be Nodal/ElementalNodal/Elemental.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Requested location nodal, elemental or + elementalnodal""", + ), + 14: 
PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + 21: PinSpecification( + name="read_beams", + type_names=["bool"], + optional=False, + document="""Elemental nodal beam results are read if this + pin is set to true (default is false)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ETH_EQV", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsThermalStrainsEqv + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsThermalStrainsEqv + """ + return super().outputs + + +class InputsThermalStrainsEqv(_Inputs): + """Intermediate class used to connect user inputs to + thermal_strains_eqv operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.thermal_strains_eqv() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> my_read_beams = bool() + >>> op.inputs.read_beams.connect(my_read_beams) + """ + + def __init__(self, op: Operator): + super().__init__(thermal_strains_eqv._spec().inputs, op) + self._time_scoping = Input(thermal_strains_eqv._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._time_scoping) + self._mesh_scoping = Input(thermal_strains_eqv._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._mesh_scoping) + self._fields_container = Input( + thermal_strains_eqv._spec().input_pin(2), 2, op, -1 + ) + self._inputs.append(self._fields_container) + self._streams_container = Input( + thermal_strains_eqv._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_container) + self._data_sources = Input(thermal_strains_eqv._spec().input_pin(4), 4, op, -1) + self._inputs.append(self._data_sources) + self._bool_rotate_to_global = Input( + thermal_strains_eqv._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._bool_rotate_to_global) + self._mesh = 
Input(thermal_strains_eqv._spec().input_pin(7), 7, op, -1) + self._inputs.append(self._mesh) + self._requested_location = Input( + thermal_strains_eqv._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._read_cyclic = Input(thermal_strains_eqv._spec().input_pin(14), 14, op, -1) + self._inputs.append(self._read_cyclic) + self._read_beams = Input(thermal_strains_eqv._spec().input_pin(21), 21, op, -1) + self._inputs.append(self._read_beams) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + + Parameters + ---------- + my_time_scoping : Scoping or int or float or Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.thermal_strains_eqv() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def mesh_scoping(self): + """Allows to connect mesh_scoping input to the operator. + + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + + Parameters + ---------- + my_mesh_scoping : ScopingsContainer or Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.thermal_strains_eqv() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> # or + >>> op.inputs.mesh_scoping(my_mesh_scoping) + """ + return self._mesh_scoping + + @property + def fields_container(self): + """Allows to connect fields_container input to the operator. 
+ + Fields container already allocated modified + inplace + + Parameters + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.thermal_strains_eqv() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> # or + >>> op.inputs.fields_container(my_fields_container) + """ + return self._fields_container + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Result file container allowed to be kept open + to cache data + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.thermal_strains_eqv() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Result file path container, used if no + streams are set + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.thermal_strains_eqv() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def bool_rotate_to_global(self): + """Allows to connect bool_rotate_to_global input to the operator. 
+ + If true the field is rotated to global + coordinate system (default true) + + Parameters + ---------- + my_bool_rotate_to_global : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.thermal_strains_eqv() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> # or + >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) + """ + return self._bool_rotate_to_global + + @property + def mesh(self): + """Allows to connect mesh input to the operator. + + Prevents from reading the mesh in the result + files + + Parameters + ---------- + my_mesh : MeshedRegion or MeshesContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.thermal_strains_eqv() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Requested location nodal, elemental or + elementalnodal + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.thermal_strains_eqv() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def read_cyclic(self): + """Allows to connect read_cyclic input to the operator. 
+ + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + Parameters + ---------- + my_read_cyclic : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.thermal_strains_eqv() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + >>> # or + >>> op.inputs.read_cyclic(my_read_cyclic) + """ + return self._read_cyclic + + @property + def read_beams(self): + """Allows to connect read_beams input to the operator. + + Elemental nodal beam results are read if this + pin is set to true (default is false) + + Parameters + ---------- + my_read_beams : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.thermal_strains_eqv() + >>> op.inputs.read_beams.connect(my_read_beams) + >>> # or + >>> op.inputs.read_beams(my_read_beams) + """ + return self._read_beams + + +class OutputsThermalStrainsEqv(_Outputs): + """Intermediate class used to get outputs from + thermal_strains_eqv operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.thermal_strains_eqv() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(thermal_strains_eqv._spec().outputs, op) + self._fields_container = Output( + thermal_strains_eqv._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.thermal_strains_eqv() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container diff --git a/ansys/dpf/core/operators/result/thickness.py b/ansys/dpf/core/operators/result/thickness.py index 7143edef1f1..2c2c6f802c3 100644 --- a/ansys/dpf/core/operators/result/thickness.py +++ b/ansys/dpf/core/operators/result/thickness.py @@ -1,92 +1,238 @@ """ thickness -========= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class thickness(Operator): - """Read/compute thickness by calling the readers defined by the datasources. - - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.thickness() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> 
op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.thickness(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="thickness", config = config, server = server) + """Read/compute thickness by calling the readers defined by the + datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.thickness() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.thickness( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... 
streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="thickness", config=config, server=server) self._inputs = InputsThickness(self) self._outputs = OutputsThickness(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute thickness by calling the readers defined by the datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes 
or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute thickness by calling the readers defined by the + datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: 
PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "thickness") + def default_config(server=None): + """Returns the default config of the operator. 
+ + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="thickness", server=server) @property def inputs(self): @@ -94,275 +240,275 @@ def inputs(self): Returns -------- - inputs : InputsThickness + inputs : InputsThickness """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsThickness + outputs : OutputsThickness """ return super().outputs -#internal name: thickness -#scripting name: thickness class InputsThickness(_Inputs): - """Intermediate class used to connect user inputs to thickness operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.thickness() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + thickness operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.thickness() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(thickness._spec().inputs, op) - self._time_scoping = Input(thickness._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(thickness._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(thickness._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(thickness._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(thickness._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(thickness._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(thickness._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(thickness._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(thickness._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(thickness._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(thickness._spec().input_pin(5), 5, 
op, -1) + self._bool_rotate_to_global = Input(thickness._spec().input_pin(5), 5, op, -1) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(thickness._spec().input_pin(7), 7, op, -1) + self._mesh = Input(thickness._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(thickness._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(thickness._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thickness() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. 
+ using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thickness() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thickness() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. - - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thickness() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thickness() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thickness() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. - - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thickness() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. 
- - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thickness() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsThickness(_Outputs): - """Intermediate class used to get outputs from thickness operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.thickness() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + thickness operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.thickness() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(thickness._spec().outputs, op) - self._fields_container = Output(thickness._spec().output_pin(0), 0, op) + self._fields_container = Output(thickness._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.thickness() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/torque.py b/ansys/dpf/core/operators/result/torque.py index a13190365a9..7d8759d8de3 100644 --- a/ansys/dpf/core/operators/result/torque.py +++ b/ansys/dpf/core/operators/result/torque.py @@ -1,66 +1,103 @@ """ torque -====== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "result" category -""" class torque(Operator): """Compute torque of a force based on a 3D point. 
- available inputs: - - fields_container (FieldsContainer) - - vector_of_double (list) + Parameters + ---------- + fields_container : FieldsContainer + Fields_container + field : Field + Field + - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.result.torque() - >>> # Instantiate operator - >>> op = dpf.operators.result.torque() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_vector_of_double = dpf.list() - >>> op.inputs.vector_of_double.connect(my_vector_of_double) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.torque( + ... fields_container=my_fields_container, + ... field=my_field, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.torque(fields_container=my_fields_container,vector_of_double=my_vector_of_double) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, vector_of_double=None, config=None, server=None): - super().__init__(name="torque", config = config, server = server) + def __init__(self, fields_container=None, field=None, config=None, server=None): + super().__init__(name="torque", config=config, server=server) self._inputs = InputsTorque(self) self._outputs = OutputsTorque(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if vector_of_double !=None: - self.inputs.vector_of_double.connect(vector_of_double) + if field is not None: + self.inputs.field.connect(field) @staticmethod def _spec(): - spec = Specification(description="""Compute torque of a force based on a 3D point.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""fields_container"""), - 1 : PinSpecification(name = "vector_of_double", type_names=["vector"], optional=False, document="""vector_of_double""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Compute torque of a force based on a 3D point.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Fields_container""", + ), + 1: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""Field""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + 
name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "torque") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="torque", server=server) @property def inputs(self): @@ -68,119 +105,113 @@ def inputs(self): Returns -------- - inputs : InputsTorque + inputs : InputsTorque """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsTorque + outputs : OutputsTorque """ return super().outputs -#internal name: torque -#scripting name: torque class InputsTorque(_Inputs): - """Intermediate class used to connect user inputs to torque operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.torque() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_vector_of_double = dpf.list() - >>> op.inputs.vector_of_double.connect(my_vector_of_double) + """Intermediate class used to connect user inputs to + torque operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.torque() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) """ + def __init__(self, op: Operator): super().__init__(torque._spec().inputs, op) - self._fields_container = Input(torque._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(torque._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) - self._vector_of_double = Input(torque._spec().input_pin(1), 1, op, -1) - self._inputs.append(self._vector_of_double) + self._field = Input(torque._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._field) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: fields_container + Fields_container Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.torque() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property - def vector_of_double(self): - """Allows to connect vector_of_double input to the operator + def field(self): + """Allows to connect field input to the operator. 
- - pindoc: vector_of_double + Field Parameters ---------- - my_vector_of_double : list, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.torque() - >>> op.inputs.vector_of_double.connect(my_vector_of_double) - >>> #or - >>> op.inputs.vector_of_double(my_vector_of_double) - + >>> op.inputs.field.connect(my_field) + >>> # or + >>> op.inputs.field(my_field) """ - return self._vector_of_double + return self._field + class OutputsTorque(_Outputs): - """Intermediate class used to get outputs from torque operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.torque() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + torque operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.torque() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(torque._spec().outputs, op) - self._fields_container = Output(torque._spec().output_pin(0), 0, op) + self._fields_container = Output(torque._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.torque() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/velocity.py b/ansys/dpf/core/operators/result/velocity.py index d1973ac7e37..92d119e446c 100644 --- a/ansys/dpf/core/operators/result/velocity.py +++ b/ansys/dpf/core/operators/result/velocity.py @@ -1,92 +1,238 @@ """ velocity -======== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class velocity(Operator): - """Read/compute nodal velocities by calling the readers defined by the datasources. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.velocity() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.velocity(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="V", config = config, server = server) + """Read/compute nodal velocities by calling the readers defined by the + datasources. 
+ + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fields container already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.velocity() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> 
op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.velocity( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="V", config=config, server=server) self._inputs = InputsVelocity(self) self._outputs = OutputsVelocity(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute nodal velocities by calling the readers defined by the datasources.""", - 
map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""Fields container already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute nodal velocities by calling the readers defined by 
the + datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fields container already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged 
(default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "V") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="V", server=server) @property def inputs(self): @@ -94,275 +240,275 @@ def inputs(self): Returns -------- - inputs : InputsVelocity + inputs : InputsVelocity """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsVelocity + outputs : OutputsVelocity """ return super().outputs -#internal name: V -#scripting name: velocity class InputsVelocity(_Inputs): - """Intermediate class used to connect user inputs to velocity operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.velocity() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> 
op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + velocity operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.velocity() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(velocity._spec().inputs, op) - self._time_scoping = Input(velocity._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(velocity._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(velocity._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(velocity._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(velocity._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(velocity._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = Input(velocity._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(velocity._spec().input_pin(3), 3, op, -1) 
self._inputs.append(self._streams_container) - self._data_sources = Input(velocity._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(velocity._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(velocity._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input(velocity._spec().input_pin(5), 5, op, -1) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(velocity._spec().input_pin(7), 7, op, -1) + self._mesh = Input(velocity._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(velocity._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(velocity._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. 
Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: Fields container already allocated modified inplace + Fields container already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
- - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsVelocity(_Outputs): - """Intermediate class used to get outputs from velocity operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.velocity() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + velocity operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.velocity() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(velocity._spec().outputs, op) - self._fields_container = Output(velocity._spec().output_pin(0), 0, op) + self._fields_container = Output(velocity._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/velocity_X.py b/ansys/dpf/core/operators/result/velocity_X.py index 056113d8ced..2555fa6eff1 100644 --- a/ansys/dpf/core/operators/result/velocity_X.py +++ b/ansys/dpf/core/operators/result/velocity_X.py @@ -1,92 +1,239 @@ """ velocity_X -========== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class velocity_X(Operator): - """Read/compute nodal velocities X component of the vector (1st component) by calling the readers defined by the datasources. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.velocity_X() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.velocity_X(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="VX", config = config, server = server) + """Read/compute nodal velocities X component of the vector (1st + component) by calling the readers defined by the 
datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.velocity_X() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> 
op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.velocity_X( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="VX", config=config, server=server) self._inputs = InputsVelocityX(self) self._outputs = OutputsVelocityX(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute nodal velocities X component of the vector (1st component) by calling the 
readers defined by the datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute nodal 
velocities X component of the vector (1st + component) by calling the readers defined by the + datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, 
if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "VX") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="VX", server=server) @property def inputs(self): @@ -94,275 +241,275 @@ def inputs(self): Returns -------- - inputs : InputsVelocityX + inputs : InputsVelocityX """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsVelocityX + outputs : OutputsVelocityX """ return super().outputs -#internal name: VX -#scripting name: velocity_X class InputsVelocityX(_Inputs): - """Intermediate class used to connect user inputs to velocity_X operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.velocity_X() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> 
op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + velocity_X operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.velocity_X() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(velocity_X._spec().inputs, op) - self._time_scoping = Input(velocity_X._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(velocity_X._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(velocity_X._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(velocity_X._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(velocity_X._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(velocity_X._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = 
Input(velocity_X._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(velocity_X._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(velocity_X._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(velocity_X._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(velocity_X._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input(velocity_X._spec().input_pin(5), 5, op, -1) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(velocity_X._spec().input_pin(7), 7, op, -1) + self._mesh = Input(velocity_X._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(velocity_X._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(velocity_X._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_X() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. 
- - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_X() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_X() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_X() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_X() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_X() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
- - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_X() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_X() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsVelocityX(_Outputs): - """Intermediate class used to get outputs from velocity_X operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.velocity_X() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + velocity_X operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.velocity_X() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(velocity_X._spec().outputs, op) - self._fields_container = Output(velocity_X._spec().output_pin(0), 0, op) + self._fields_container = Output(velocity_X._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_X() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/velocity_Y.py b/ansys/dpf/core/operators/result/velocity_Y.py index 8a3d1c089ff..44163312515 100644 --- a/ansys/dpf/core/operators/result/velocity_Y.py +++ b/ansys/dpf/core/operators/result/velocity_Y.py @@ -1,92 +1,239 @@ """ velocity_Y -========== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class velocity_Y(Operator): - """Read/compute nodal velocities Y component of the vector (2nd component) by calling the readers defined by the datasources. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.velocity_Y() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.velocity_Y(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="VY", config = config, server = server) + """Read/compute nodal velocities Y component of the vector (2nd + component) by calling the readers defined by the 
datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.velocity_Y() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> 
op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.velocity_Y( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="VY", config=config, server=server) self._inputs = InputsVelocityY(self) self._outputs = OutputsVelocityY(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute nodal velocities Y component of the vector (2nd component) by calling the 
readers defined by the datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute nodal 
velocities Y component of the vector (2nd + component) by calling the readers defined by the + datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, 
if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "VY") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="VY", server=server) @property def inputs(self): @@ -94,275 +241,275 @@ def inputs(self): Returns -------- - inputs : InputsVelocityY + inputs : InputsVelocityY """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsVelocityY + outputs : OutputsVelocityY """ return super().outputs -#internal name: VY -#scripting name: velocity_Y class InputsVelocityY(_Inputs): - """Intermediate class used to connect user inputs to velocity_Y operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.velocity_Y() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> 
op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + velocity_Y operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.velocity_Y() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(velocity_Y._spec().inputs, op) - self._time_scoping = Input(velocity_Y._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(velocity_Y._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(velocity_Y._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(velocity_Y._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(velocity_Y._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(velocity_Y._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = 
Input(velocity_Y._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(velocity_Y._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(velocity_Y._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(velocity_Y._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(velocity_Y._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input(velocity_Y._spec().input_pin(5), 5, op, -1) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(velocity_Y._spec().input_pin(7), 7, op, -1) + self._mesh = Input(velocity_Y._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(velocity_Y._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(velocity_Y._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_Y() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. 
- - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_Y() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_Y() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_Y() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_Y() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_Y() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
- - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_Y() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_Y() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsVelocityY(_Outputs): - """Intermediate class used to get outputs from velocity_Y operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.velocity_Y() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + velocity_Y operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.velocity_Y() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(velocity_Y._spec().outputs, op) - self._fields_container = Output(velocity_Y._spec().output_pin(0), 0, op) + self._fields_container = Output(velocity_Y._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_Y() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/result/velocity_Z.py b/ansys/dpf/core/operators/result/velocity_Z.py index a424cc205f7..87dd122b041 100644 --- a/ansys/dpf/core/operators/result/velocity_Z.py +++ b/ansys/dpf/core/operators/result/velocity_Z.py @@ -1,92 +1,239 @@ """ velocity_Z -========== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "result" category -""" class velocity_Z(Operator): - """Read/compute nodal velocities Z component of the vector (3rd component) by calling the readers defined by the datasources. 
- - available inputs: - - time_scoping (Scoping, int, listfloat, Field, list) (optional) - - mesh_scoping (ScopingsContainer, Scoping) (optional) - - fields_container (FieldsContainer) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - bool_rotate_to_global (bool) (optional) - - mesh (MeshedRegion, MeshesContainer) (optional) - - read_cyclic (int) (optional) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.velocity_Z() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.velocity_Z(time_scoping=my_time_scoping,mesh_scoping=my_mesh_scoping,data_sources=my_data_sources) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, time_scoping=None, mesh_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="VZ", config = config, server = server) + """Read/compute nodal velocities Z component of the vector (3rd + component) by calling the readers defined by the 
datasources. + + Parameters + ---------- + time_scoping : Scoping or int or float or Field, optional + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output + mesh_scoping : ScopingsContainer or Scoping, optional + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains + fields_container : FieldsContainer, optional + Fieldscontainer already allocated modified + inplace + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + bool_rotate_to_global : bool, optional + If true the field is rotated to global + coordinate system (default true) + mesh : MeshedRegion or MeshesContainer, optional + Prevents from reading the mesh in the result + files + read_cyclic : int, optional + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.velocity_Z() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> 
op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.velocity_Z( + ... time_scoping=my_time_scoping, + ... mesh_scoping=my_mesh_scoping, + ... fields_container=my_fields_container, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... bool_rotate_to_global=my_bool_rotate_to_global, + ... mesh=my_mesh, + ... read_cyclic=my_read_cyclic, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + time_scoping=None, + mesh_scoping=None, + fields_container=None, + streams_container=None, + data_sources=None, + bool_rotate_to_global=None, + mesh=None, + read_cyclic=None, + config=None, + server=None, + ): + super().__init__(name="VZ", config=config, server=server) self._inputs = InputsVelocityZ(self) self._outputs = OutputsVelocityZ(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if data_sources !=None: + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: self.inputs.data_sources.connect(data_sources) + if bool_rotate_to_global is not None: + self.inputs.bool_rotate_to_global.connect(bool_rotate_to_global) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if read_cyclic is not None: + self.inputs.read_cyclic.connect(read_cyclic) @staticmethod def _spec(): - spec = Specification(description="""Read/compute nodal velocities Z component of the vector (3rd component) by calling the 
readers defined by the datasources.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping","int32","vector","double","field","vector"], optional=True, document="""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output"""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container","scoping"], optional=True, document="""nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains"""), - 2 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=True, document="""FieldsContainer already allocated modified inplace"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document="""result file container allowed to be kept open to cache data"""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""result file path container, used if no streams are set"""), - 5 : PinSpecification(name = "bool_rotate_to_global", type_names=["bool"], optional=True, document="""if true the field is rotated to global coordinate system (default true)"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region","meshes_container"], optional=True, document="""prevents from reading the mesh in the result files"""), - 14 : PinSpecification(name = "read_cyclic", type_names=["int32"], optional=True, document="""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Read/compute nodal 
velocities Z component of the vector (3rd + component) by calling the readers defined by the + datasources.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=[ + "scoping", + "int32", + "vector", + "double", + "field", + "vector", + ], + optional=True, + document="""Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container", "scoping"], + optional=True, + document="""Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains""", + ), + 2: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=True, + document="""Fieldscontainer already allocated modified + inplace""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 5: PinSpecification( + name="bool_rotate_to_global", + type_names=["bool"], + optional=True, + document="""If true the field is rotated to global + coordinate system (default true)""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=True, + document="""Prevents from reading the mesh in the result + files""", + ), + 14: PinSpecification( + name="read_cyclic", + type_names=["enum dataProcessing::ECyclicReading", "int32"], + optional=True, + document="""If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, 
if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "VZ") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="VZ", server=server) @property def inputs(self): @@ -94,275 +241,275 @@ def inputs(self): Returns -------- - inputs : InputsVelocityZ + inputs : InputsVelocityZ """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsVelocityZ + outputs : OutputsVelocityZ """ return super().outputs -#internal name: VZ -#scripting name: velocity_Z class InputsVelocityZ(_Inputs): - """Intermediate class used to connect user inputs to velocity_Z operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.velocity_Z() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> 
op.inputs.data_sources.connect(my_data_sources) - >>> my_bool_rotate_to_global = bool() - >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_read_cyclic = int() - >>> op.inputs.read_cyclic.connect(my_read_cyclic) + """Intermediate class used to connect user inputs to + velocity_Z operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.velocity_Z() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mesh_scoping = dpf.ScopingsContainer() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_bool_rotate_to_global = bool() + >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_read_cyclic = int() + >>> op.inputs.read_cyclic.connect(my_read_cyclic) """ + def __init__(self, op: Operator): super().__init__(velocity_Z._spec().inputs, op) - self._time_scoping = Input(velocity_Z._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(velocity_Z._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._mesh_scoping = Input(velocity_Z._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(velocity_Z._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._fields_container = Input(velocity_Z._spec().input_pin(2), 2, op, -1) + self._fields_container = Input(velocity_Z._spec().input_pin(2), 2, op, -1) self._inputs.append(self._fields_container) - self._streams_container = 
Input(velocity_Z._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(velocity_Z._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(velocity_Z._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(velocity_Z._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._bool_rotate_to_global = Input(velocity_Z._spec().input_pin(5), 5, op, -1) + self._bool_rotate_to_global = Input(velocity_Z._spec().input_pin(5), 5, op, -1) self._inputs.append(self._bool_rotate_to_global) - self._mesh = Input(velocity_Z._spec().input_pin(7), 7, op, -1) + self._mesh = Input(velocity_Z._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._read_cyclic = Input(velocity_Z._spec().input_pin(14), 14, op, -1) + self._read_cyclic = Input(velocity_Z._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. - - pindoc: time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) requiered in output + Time/freq (use doubles or field), time/freq + set ids (use ints or scoping) or + time/freq step ids (use scoping with + timefreq_steps location) required in + output Parameters ---------- - my_time_scoping : Scoping, int, list, float, Field, list, + my_time_scoping : Scoping or int or float or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_Z() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. 
- - pindoc: nodes or elements scoping requiered in output. The scoping's location indicates whether nodes or elements are asked. Using scopings container enables to split the result fields container in domains + Nodes or elements scoping required in output. + the scoping's location indicates + whether nodes or elements are asked. + using scopings container enables to + split the result fields container in + domains Parameters ---------- - my_mesh_scoping : ScopingsContainer, Scoping, + my_mesh_scoping : ScopingsContainer or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_Z() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: FieldsContainer already allocated modified inplace + Fieldscontainer already allocated modified + inplace Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_Z() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
- - pindoc: result file container allowed to be kept open to cache data + Result file container allowed to be kept open + to cache data Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_Z() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. - - pindoc: result file path container, used if no streams are set + Result file path container, used if no + streams are set Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_Z() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def bool_rotate_to_global(self): - """Allows to connect bool_rotate_to_global input to the operator + """Allows to connect bool_rotate_to_global input to the operator. - - pindoc: if true the field is rotated to global coordinate system (default true) + If true the field is rotated to global + coordinate system (default true) Parameters ---------- - my_bool_rotate_to_global : bool, + my_bool_rotate_to_global : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_Z() >>> op.inputs.bool_rotate_to_global.connect(my_bool_rotate_to_global) - >>> #or + >>> # or >>> op.inputs.bool_rotate_to_global(my_bool_rotate_to_global) - """ return self._bool_rotate_to_global @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
- - pindoc: prevents from reading the mesh in the result files + Prevents from reading the mesh in the result + files Parameters ---------- - my_mesh : MeshedRegion, MeshesContainer, + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_Z() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def read_cyclic(self): - """Allows to connect read_cyclic input to the operator + """Allows to connect read_cyclic input to the operator. - - pindoc: if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1) + If 0 cyclic symmetry is ignored, if 1 cyclic + sector is read, if 2 cyclic expansion + is done, if 3 cyclic expansion is + done and stages are merged (default + is 1) Parameters ---------- - my_read_cyclic : int, + my_read_cyclic : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_Z() >>> op.inputs.read_cyclic.connect(my_read_cyclic) - >>> #or + >>> # or >>> op.inputs.read_cyclic(my_read_cyclic) - """ return self._read_cyclic + class OutputsVelocityZ(_Outputs): - """Intermediate class used to get outputs from velocity_Z operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.result.velocity_Z() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + velocity_Z operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.velocity_Z() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(velocity_Z._spec().outputs, op) - self._fields_container = Output(velocity_Z._spec().output_pin(0), 0, op) + self._fields_container = Output(velocity_Z._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.velocity_Z() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/scoping/__init__.py b/ansys/dpf/core/operators/scoping/__init__.py index 4b876393fae..d6a70ccc593 100644 --- a/ansys/dpf/core/operators/scoping/__init__.py +++ b/ansys/dpf/core/operators/scoping/__init__.py @@ -4,9 +4,10 @@ from .on_property import on_property from .transpose import transpose from .rescope_fc import rescope_fc -from .splitted_on_property_type import splitted_on_property_type from .rescope import rescope +from .split_on_property_type import split_on_property_type from .on_named_selection import on_named_selection from .nodal_from_mesh import nodal_from_mesh from .change_fc import change_fc +from .on_mesh_property import on_mesh_property from .from_mesh import from_mesh diff --git a/ansys/dpf/core/operators/scoping/change_fc.py b/ansys/dpf/core/operators/scoping/change_fc.py index 3823e4d8579..15ba9bccd0b 100644 --- a/ansys/dpf/core/operators/scoping/change_fc.py +++ b/ansys/dpf/core/operators/scoping/change_fc.py @@ -1,66 +1,105 @@ """ change_fc -========= +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "scoping" category -""" class change_fc(Operator): - """Rescope a fields container to correspond to a scopings container + """Rescope / split a fields container to correspond to a scopings + container + + Parameters + ---------- + fields_container : FieldsContainer + scopings_container : ScopingsContainer - available inputs: - - fields_container (FieldsContainer) - - scopings_container (ScopingsContainer) - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.scoping.change_fc() - >>> # Instantiate operator - >>> op = dpf.operators.scoping.change_fc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_scopings_container = dpf.ScopingsContainer() + >>> op.inputs.scopings_container.connect(my_scopings_container) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_scopings_container = dpf.ScopingsContainer() - >>> op.inputs.scopings_container.connect(my_scopings_container) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.scoping.change_fc( + ... fields_container=my_fields_container, + ... scopings_container=my_scopings_container, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.scoping.change_fc(fields_container=my_fields_container,scopings_container=my_scopings_container) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, scopings_container=None, config=None, server=None): - super().__init__(name="rescope_fc", config = config, server = server) + def __init__( + self, fields_container=None, scopings_container=None, config=None, server=None + ): + super().__init__(name="rescope_fc", config=config, server=server) self._inputs = InputsChangeFc(self) self._outputs = OutputsChangeFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if scopings_container !=None: + if scopings_container is not None: self.inputs.scopings_container.connect(scopings_container) @staticmethod def _spec(): - spec = Specification(description="""Rescope a fields container to correspond to a scopings container""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "scopings_container", type_names=["scopings_container"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Rescope / split a fields container to correspond to a scopings + container""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="scopings_container", + type_names=["scopings_container"], + optional=False, + document="""""", + ), + }, + 
map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "rescope_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="rescope_fc", server=server) @property def inputs(self): @@ -68,115 +107,109 @@ def inputs(self): Returns -------- - inputs : InputsChangeFc + inputs : InputsChangeFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsChangeFc + outputs : OutputsChangeFc """ return super().outputs -#internal name: rescope_fc -#scripting name: change_fc class InputsChangeFc(_Inputs): - """Intermediate class used to connect user inputs to change_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.change_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_scopings_container = dpf.ScopingsContainer() - >>> op.inputs.scopings_container.connect(my_scopings_container) + """Intermediate class used to connect user inputs to + change_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.change_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_scopings_container = dpf.ScopingsContainer() + >>> op.inputs.scopings_container.connect(my_scopings_container) """ + def __init__(self, op: Operator): super().__init__(change_fc._spec().inputs, op) - self._fields_container = Input(change_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(change_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) - self._scopings_container = Input(change_fc._spec().input_pin(1), 1, op, -1) + self._scopings_container = Input(change_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._scopings_container) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.change_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def scopings_container(self): - """Allows to connect scopings_container input to the operator + """Allows to connect scopings_container input to the operator. 
Parameters ---------- - my_scopings_container : ScopingsContainer, + my_scopings_container : ScopingsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.change_fc() >>> op.inputs.scopings_container.connect(my_scopings_container) - >>> #or + >>> # or >>> op.inputs.scopings_container(my_scopings_container) - """ return self._scopings_container + class OutputsChangeFc(_Outputs): - """Intermediate class used to get outputs from change_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.change_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + change_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.change_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(change_fc._spec().outputs, op) - self._fields_container = Output(change_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(change_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.change_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/scoping/connectivity_ids.py b/ansys/dpf/core/operators/scoping/connectivity_ids.py index 2ff820744b7..20e215e9a1f 100644 --- a/ansys/dpf/core/operators/scoping/connectivity_ids.py +++ b/ansys/dpf/core/operators/scoping/connectivity_ids.py @@ -1,75 +1,137 @@ """ connectivity_ids -================ +================ +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "scoping" category -""" class connectivity_ids(Operator): - """Returns the ordered node ids corresponding to the element ids scoping in input. For each element the node ids are its connectivity.
- - available inputs: - - mesh_scoping (Scoping) - - mesh (MeshedRegion) (optional) - - take_mid_nodes (bool) (optional) - - available outputs: - - mesh_scoping (Scoping) - - elemental_scoping (Scoping) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.scoping.connectivity_ids() - - >>> # Make input connections - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_take_mid_nodes = bool() - >>> op.inputs.take_mid_nodes.connect(my_take_mid_nodes) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.scoping.connectivity_ids(mesh_scoping=my_mesh_scoping,mesh=my_mesh,take_mid_nodes=my_take_mid_nodes) - - >>> # Get output data - >>> result_mesh_scoping = op.outputs.mesh_scoping() - >>> result_elemental_scoping = op.outputs.elemental_scoping()""" - def __init__(self, mesh_scoping=None, mesh=None, take_mid_nodes=None, config=None, server=None): - super().__init__(name="scoping::connectivity_ids", config = config, server = server) + """Returns the ordered node ids corresponding to the element ids scoping + in input. For each element the node ids are its connectivity. 
+ + Parameters + ---------- + mesh_scoping : Scoping + Elemental scoping + mesh : MeshedRegion, optional + The support of the scoping is expected if + there is no mesh in input + take_mid_nodes : bool, optional + Default is true + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.scoping.connectivity_ids() + + >>> # Make input connections + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_take_mid_nodes = bool() + >>> op.inputs.take_mid_nodes.connect(my_take_mid_nodes) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.scoping.connectivity_ids( + ... mesh_scoping=my_mesh_scoping, + ... mesh=my_mesh, + ... take_mid_nodes=my_take_mid_nodes, + ... ) + + >>> # Get output data + >>> result_mesh_scoping = op.outputs.mesh_scoping() + >>> result_elemental_scoping = op.outputs.elemental_scoping() + """ + + def __init__( + self, + mesh_scoping=None, + mesh=None, + take_mid_nodes=None, + config=None, + server=None, + ): + super().__init__(name="scoping::connectivity_ids", config=config, server=server) self._inputs = InputsConnectivityIds(self) self._outputs = OutputsConnectivityIds(self) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) - if take_mid_nodes !=None: + if take_mid_nodes is not None: self.inputs.take_mid_nodes.connect(take_mid_nodes) @staticmethod def _spec(): - spec = Specification(description="""Returns the ordered node ids corresponding to the element ids scoping in input. 
For each element the node ids are its connectivity.""", - map_input_pin_spec={ - 1 : PinSpecification(name = "mesh_scoping", type_names=["scoping"], optional=False, document="""Elemental scoping"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""the support of the scoping is expected if there is no mesh in input"""), - 10 : PinSpecification(name = "take_mid_nodes", type_names=["bool"], optional=True, document="""default is true""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "mesh_scoping", type_names=["scoping"], optional=False, document=""""""), - 1 : PinSpecification(name = "elemental_scoping", type_names=["scoping"], optional=False, document="""same as the input scoping but with ids dupplicated to havve the same size as nodal output scoping""")}) + description = """Returns the ordered node ids corresponding to the element ids scoping + in input. For each element the node ids are its + connectivity.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 1: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=False, + document="""Elemental scoping""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""The support of the scoping is expected if + there is no mesh in input""", + ), + 10: PinSpecification( + name="take_mid_nodes", + type_names=["bool"], + optional=True, + document="""Default is true""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="elemental_scoping", + type_names=["scoping"], + optional=False, + document="""Same as the input scoping but with ids + duplicated to have the same size as + nodal output scoping""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = 
"scoping::connectivity_ids") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="scoping::connectivity_ids", server=server) @property def inputs(self): @@ -77,169 +139,158 @@ def inputs(self): Returns -------- - inputs : InputsConnectivityIds + inputs : InputsConnectivityIds """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsConnectivityIds + outputs : OutputsConnectivityIds """ return super().outputs -#internal name: scoping::connectivity_ids -#scripting name: connectivity_ids class InputsConnectivityIds(_Inputs): - """Intermediate class used to connect user inputs to connectivity_ids operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.connectivity_ids() - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_take_mid_nodes = bool() - >>> op.inputs.take_mid_nodes.connect(my_take_mid_nodes) + """Intermediate class used to connect user inputs to + connectivity_ids operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.connectivity_ids() + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_take_mid_nodes = bool() + >>> op.inputs.take_mid_nodes.connect(my_take_mid_nodes) """ + def __init__(self, op: Operator): super().__init__(connectivity_ids._spec().inputs, op) - self._mesh_scoping = Input(connectivity_ids._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(connectivity_ids._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._mesh = Input(connectivity_ids._spec().input_pin(7), 7, op, -1) + self._mesh = Input(connectivity_ids._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) - self._take_mid_nodes = Input(connectivity_ids._spec().input_pin(10), 10, op, -1) + self._take_mid_nodes = Input(connectivity_ids._spec().input_pin(10), 10, op, -1) self._inputs.append(self._take_mid_nodes) @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: Elemental scoping + Elemental scoping Parameters ---------- - my_mesh_scoping : Scoping, + my_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.connectivity_ids() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
- - pindoc: the support of the scoping is expected if there is no mesh in input + The support of the scoping is expected if + there is no mesh in input Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.connectivity_ids() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def take_mid_nodes(self): - """Allows to connect take_mid_nodes input to the operator + """Allows to connect take_mid_nodes input to the operator. - - pindoc: default is true + Default is true Parameters ---------- - my_take_mid_nodes : bool, + my_take_mid_nodes : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.connectivity_ids() >>> op.inputs.take_mid_nodes.connect(my_take_mid_nodes) - >>> #or + >>> # or >>> op.inputs.take_mid_nodes(my_take_mid_nodes) - """ return self._take_mid_nodes + class OutputsConnectivityIds(_Outputs): - """Intermediate class used to get outputs from connectivity_ids operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.connectivity_ids() - >>> # Connect inputs : op.inputs. ... - >>> result_mesh_scoping = op.outputs.mesh_scoping() - >>> result_elemental_scoping = op.outputs.elemental_scoping() + """Intermediate class used to get outputs from + connectivity_ids operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.connectivity_ids() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_mesh_scoping = op.outputs.mesh_scoping() + >>> result_elemental_scoping = op.outputs.elemental_scoping() """ + def __init__(self, op: Operator): super().__init__(connectivity_ids._spec().outputs, op) - self._mesh_scoping = Output(connectivity_ids._spec().output_pin(0), 0, op) + self._mesh_scoping = Output(connectivity_ids._spec().output_pin(0), 0, op) self._outputs.append(self._mesh_scoping) - self._elemental_scoping = Output(connectivity_ids._spec().output_pin(1), 1, op) + self._elemental_scoping = Output(connectivity_ids._spec().output_pin(1), 1, op) self._outputs.append(self._elemental_scoping) @property def mesh_scoping(self): """Allows to get mesh_scoping output of the operator - Returns ---------- - my_mesh_scoping : Scoping, + my_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.connectivity_ids() >>> # Connect inputs : op.inputs. ... - >>> result_mesh_scoping = op.outputs.mesh_scoping() - """ + >>> result_mesh_scoping = op.outputs.mesh_scoping() + """ # noqa: E501 return self._mesh_scoping @property def elemental_scoping(self): """Allows to get elemental_scoping output of the operator - - - pindoc: same as the input scoping but with ids dupplicated to havve the same size as nodal output scoping - Returns ---------- - my_elemental_scoping : Scoping, + my_elemental_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.connectivity_ids() >>> # Connect inputs : op.inputs. ... 
- >>> result_elemental_scoping = op.outputs.elemental_scoping() - """ + >>> result_elemental_scoping = op.outputs.elemental_scoping() + """ # noqa: E501 return self._elemental_scoping - diff --git a/ansys/dpf/core/operators/scoping/elemental_from_mesh.py b/ansys/dpf/core/operators/scoping/elemental_from_mesh.py index 56460af826b..2ba439e760c 100644 --- a/ansys/dpf/core/operators/scoping/elemental_from_mesh.py +++ b/ansys/dpf/core/operators/scoping/elemental_from_mesh.py @@ -1,60 +1,89 @@ """ elemental_from_mesh -=================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "scoping" category -""" class elemental_from_mesh(Operator): """Get the elements ids scoping of a given input mesh. - available inputs: - - mesh (MeshedRegion) + Parameters + ---------- + mesh : MeshedRegion + - available outputs: - - mesh_scoping (Scoping) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.scoping.elemental_from_mesh() - >>> # Instantiate operator - >>> op = dpf.operators.scoping.elemental_from_mesh() + >>> # Make input connections + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) - >>> # Make input connections - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.scoping.elemental_from_mesh( + ... mesh=my_mesh, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.scoping.elemental_from_mesh(mesh=my_mesh) + >>> # Get output data + >>> result_mesh_scoping = op.outputs.mesh_scoping() + """ - >>> # Get output data - >>> result_mesh_scoping = op.outputs.mesh_scoping()""" def __init__(self, mesh=None, config=None, server=None): - super().__init__(name="GetElementScopingFromMesh", config = config, server = server) + super().__init__(name="GetElementScopingFromMesh", config=config, server=server) self._inputs = InputsElementalFromMesh(self) self._outputs = OutputsElementalFromMesh(self) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Get the elements ids scoping of a given input mesh.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "mesh_scoping", type_names=["scoping"], optional=False, document="""""")}) + description = """Get the elements ids scoping of a given input mesh.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "GetElementScopingFromMesh") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="GetElementScopingFromMesh", server=server) @property def inputs(self): @@ -62,91 +91,87 @@ def inputs(self): Returns -------- - inputs : InputsElementalFromMesh + inputs : InputsElementalFromMesh """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsElementalFromMesh + outputs : OutputsElementalFromMesh """ return super().outputs -#internal name: GetElementScopingFromMesh -#scripting name: elemental_from_mesh class InputsElementalFromMesh(_Inputs): - """Intermediate class used to connect user inputs to elemental_from_mesh operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.elemental_from_mesh() - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + elemental_from_mesh operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.elemental_from_mesh() + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(elemental_from_mesh._spec().inputs, op) - self._mesh = Input(elemental_from_mesh._spec().input_pin(0), 0, op, -1) + self._mesh = Input(elemental_from_mesh._spec().input_pin(0), 0, op, -1) self._inputs.append(self._mesh) @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.elemental_from_mesh() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsElementalFromMesh(_Outputs): - """Intermediate class used to get outputs from elemental_from_mesh operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.elemental_from_mesh() - >>> # Connect inputs : op.inputs. ... - >>> result_mesh_scoping = op.outputs.mesh_scoping() + """Intermediate class used to get outputs from + elemental_from_mesh operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.elemental_from_mesh() + >>> # Connect inputs : op.inputs. ... + >>> result_mesh_scoping = op.outputs.mesh_scoping() """ + def __init__(self, op: Operator): super().__init__(elemental_from_mesh._spec().outputs, op) - self._mesh_scoping = Output(elemental_from_mesh._spec().output_pin(0), 0, op) + self._mesh_scoping = Output(elemental_from_mesh._spec().output_pin(0), 0, op) self._outputs.append(self._mesh_scoping) @property def mesh_scoping(self): """Allows to get mesh_scoping output of the operator - Returns ---------- - my_mesh_scoping : Scoping, + my_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.elemental_from_mesh() >>> # Connect inputs : op.inputs. ... 
- >>> result_mesh_scoping = op.outputs.mesh_scoping() - """ + >>> result_mesh_scoping = op.outputs.mesh_scoping() + """ # noqa: E501 return self._mesh_scoping - diff --git a/ansys/dpf/core/operators/scoping/from_mesh.py b/ansys/dpf/core/operators/scoping/from_mesh.py index 6f5a0144d28..959e55ced7b 100644 --- a/ansys/dpf/core/operators/scoping/from_mesh.py +++ b/ansys/dpf/core/operators/scoping/from_mesh.py @@ -1,66 +1,108 @@ """ from_mesh -========= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "scoping" category -""" class from_mesh(Operator): """Provides the entire mesh scoping based on the requested location - available inputs: - - mesh (MeshedRegion) - - requested_location (str) (optional) + Parameters + ---------- + mesh : MeshedRegion + requested_location : str, optional + If nothing the operator returns the nodes + scoping, possible locations are: + nodal(default) or elemental + - available outputs: - - scoping (Scoping) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.scoping.from_mesh() - >>> # Instantiate operator - >>> op = dpf.operators.scoping.from_mesh() + >>> # Make input connections + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) - >>> # Make input connections - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> 
op.inputs.requested_location.connect(my_requested_location) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.scoping.from_mesh( + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.scoping.from_mesh(mesh=my_mesh,requested_location=my_requested_location) + >>> # Get output data + >>> result_scoping = op.outputs.scoping() + """ - >>> # Get output data - >>> result_scoping = op.outputs.scoping()""" def __init__(self, mesh=None, requested_location=None, config=None, server=None): - super().__init__(name="MeshScopingProvider", config = config, server = server) + super().__init__(name="MeshScopingProvider", config=config, server=server) self._inputs = InputsFromMesh(self) self._outputs = OutputsFromMesh(self) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) - if requested_location !=None: + if requested_location is not None: self.inputs.requested_location.connect(requested_location) @staticmethod def _spec(): - spec = Specification(description="""Provides the entire mesh scoping based on the requested location""", - map_input_pin_spec={ - 0 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=False, document=""""""), - 1 : PinSpecification(name = "requested_location", type_names=["string"], optional=True, document="""if nothing the operator returns the nodes scoping, possible locations are: Nodal or Elemental""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "scoping", type_names=["scoping"], optional=False, document="""""")}) + description = ( + """Provides the entire mesh scoping based on the requested location""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="requested_location", + 
type_names=["string"], + optional=True, + document="""If nothing the operator returns the nodes + scoping, possible locations are: + nodal(default) or elemental""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "MeshScopingProvider") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="MeshScopingProvider", server=server) @property def inputs(self): @@ -68,117 +110,113 @@ def inputs(self): Returns -------- - inputs : InputsFromMesh + inputs : InputsFromMesh """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsFromMesh + outputs : OutputsFromMesh """ return super().outputs -#internal name: MeshScopingProvider -#scripting name: from_mesh class InputsFromMesh(_Inputs): - """Intermediate class used to connect user inputs to from_mesh operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.from_mesh() - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) + """Intermediate class used to connect user inputs to + from_mesh operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.from_mesh() + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) """ + def __init__(self, op: Operator): super().__init__(from_mesh._spec().inputs, op) - self._mesh = Input(from_mesh._spec().input_pin(0), 0, op, -1) + self._mesh = Input(from_mesh._spec().input_pin(0), 0, op, -1) self._inputs.append(self._mesh) - self._requested_location = Input(from_mesh._spec().input_pin(1), 1, op, -1) + self._requested_location = Input(from_mesh._spec().input_pin(1), 1, op, -1) self._inputs.append(self._requested_location) @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.from_mesh() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. 
- - pindoc: if nothing the operator returns the nodes scoping, possible locations are: Nodal or Elemental + If nothing the operator returns the nodes + scoping, possible locations are: + nodal(default) or elemental Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.from_mesh() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location + class OutputsFromMesh(_Outputs): - """Intermediate class used to get outputs from from_mesh operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.from_mesh() - >>> # Connect inputs : op.inputs. ... - >>> result_scoping = op.outputs.scoping() + """Intermediate class used to get outputs from + from_mesh operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.from_mesh() + >>> # Connect inputs : op.inputs. ... + >>> result_scoping = op.outputs.scoping() """ + def __init__(self, op: Operator): super().__init__(from_mesh._spec().outputs, op) - self._scoping = Output(from_mesh._spec().output_pin(0), 0, op) + self._scoping = Output(from_mesh._spec().output_pin(0), 0, op) self._outputs.append(self._scoping) @property def scoping(self): """Allows to get scoping output of the operator - Returns ---------- - my_scoping : Scoping, + my_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.from_mesh() >>> # Connect inputs : op.inputs. ... 
- >>> result_scoping = op.outputs.scoping() - """ + >>> result_scoping = op.outputs.scoping() + """ # noqa: E501 return self._scoping - diff --git a/ansys/dpf/core/operators/scoping/intersect.py b/ansys/dpf/core/operators/scoping/intersect.py index 6136c1491c5..8eb6c446fcb 100644 --- a/ansys/dpf/core/operators/scoping/intersect.py +++ b/ansys/dpf/core/operators/scoping/intersect.py @@ -1,69 +1,110 @@ """ intersect -========= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "scoping" category -""" class intersect(Operator): - """Intersect 2 scopings and return the intersection and the difference between the intersection and the first scoping. + """Intersect 2 scopings and return the intersection and the difference + between the intersection and the first scoping. 
+ + Parameters + ---------- + scopingA : Scoping + scopingB : Scoping - available inputs: - - scopingA (Scoping) - - scopingB (Scoping) - available outputs: - - intersection (Scoping) - - scopingA_min_intersection (Scoping) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.scoping.intersect() - >>> # Instantiate operator - >>> op = dpf.operators.scoping.intersect() + >>> # Make input connections + >>> my_scopingA = dpf.Scoping() + >>> op.inputs.scopingA.connect(my_scopingA) + >>> my_scopingB = dpf.Scoping() + >>> op.inputs.scopingB.connect(my_scopingB) - >>> # Make input connections - >>> my_scopingA = dpf.Scoping() - >>> op.inputs.scopingA.connect(my_scopingA) - >>> my_scopingB = dpf.Scoping() - >>> op.inputs.scopingB.connect(my_scopingB) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.scoping.intersect( + ... scopingA=my_scopingA, + ... scopingB=my_scopingB, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.scoping.intersect(scopingA=my_scopingA,scopingB=my_scopingB) + >>> # Get output data + >>> result_intersection = op.outputs.intersection() + >>> result_scopingA_min_intersection = op.outputs.scopingA_min_intersection() + """ - >>> # Get output data - >>> result_intersection = op.outputs.intersection() - >>> result_scopingA_min_intersection = op.outputs.scopingA_min_intersection()""" def __init__(self, scopingA=None, scopingB=None, config=None, server=None): - super().__init__(name="scoping::intersect", config = config, server = server) + super().__init__(name="scoping::intersect", config=config, server=server) self._inputs = InputsIntersect(self) self._outputs = OutputsIntersect(self) - if scopingA !=None: + if scopingA is not None: self.inputs.scopingA.connect(scopingA) - if scopingB !=None: + if scopingB is not None: self.inputs.scopingB.connect(scopingB) @staticmethod def _spec(): - spec = Specification(description="""Intersect 2 scopings and return the intersection and the difference between the intersection and the first scoping.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "scopingA", type_names=["scoping"], optional=False, document=""""""), - 1 : PinSpecification(name = "scopingB", type_names=["scoping"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "intersection", type_names=["scoping"], optional=False, document=""""""), - 1 : PinSpecification(name = "scopingA_min_intersection", type_names=["scoping"], optional=False, document="""""")}) + description = """Intersect 2 scopings and return the intersection and the difference + between the intersection and the first scoping.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="scopingA", + type_names=["scoping"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="scopingB", + 
type_names=["scoping"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="intersection", + type_names=["scoping"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="scopingA_min_intersection", + type_names=["scoping"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "scoping::intersect") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="scoping::intersect", server=server) @property def inputs(self): @@ -71,137 +112,129 @@ def inputs(self): Returns -------- - inputs : InputsIntersect + inputs : InputsIntersect """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsIntersect + outputs : OutputsIntersect """ return super().outputs -#internal name: scoping::intersect -#scripting name: intersect class InputsIntersect(_Inputs): - """Intermediate class used to connect user inputs to intersect operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.intersect() - >>> my_scopingA = dpf.Scoping() - >>> op.inputs.scopingA.connect(my_scopingA) - >>> my_scopingB = dpf.Scoping() - >>> op.inputs.scopingB.connect(my_scopingB) + """Intermediate class used to connect user inputs to + intersect operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.intersect() + >>> my_scopingA = dpf.Scoping() + >>> op.inputs.scopingA.connect(my_scopingA) + >>> my_scopingB = dpf.Scoping() + >>> op.inputs.scopingB.connect(my_scopingB) """ + def __init__(self, op: Operator): super().__init__(intersect._spec().inputs, op) - self._scopingA = Input(intersect._spec().input_pin(0), 0, op, -1) + self._scopingA = Input(intersect._spec().input_pin(0), 0, op, -1) self._inputs.append(self._scopingA) - self._scopingB = Input(intersect._spec().input_pin(1), 1, op, -1) + self._scopingB = Input(intersect._spec().input_pin(1), 1, op, -1) self._inputs.append(self._scopingB) @property def scopingA(self): - """Allows to connect scopingA input to the operator + """Allows to connect scopingA input to the operator. Parameters ---------- - my_scopingA : Scoping, + my_scopingA : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.intersect() >>> op.inputs.scopingA.connect(my_scopingA) - >>> #or + >>> # or >>> op.inputs.scopingA(my_scopingA) - """ return self._scopingA @property def scopingB(self): - """Allows to connect scopingB input to the operator + """Allows to connect scopingB input to the operator. Parameters ---------- - my_scopingB : Scoping, + my_scopingB : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.intersect() >>> op.inputs.scopingB.connect(my_scopingB) - >>> #or + >>> # or >>> op.inputs.scopingB(my_scopingB) - """ return self._scopingB + class OutputsIntersect(_Outputs): - """Intermediate class used to get outputs from intersect operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.intersect() - >>> # Connect inputs : op.inputs. ... 
- >>> result_intersection = op.outputs.intersection() - >>> result_scopingA_min_intersection = op.outputs.scopingA_min_intersection() + """Intermediate class used to get outputs from + intersect operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.intersect() + >>> # Connect inputs : op.inputs. ... + >>> result_intersection = op.outputs.intersection() + >>> result_scopingA_min_intersection = op.outputs.scopingA_min_intersection() """ + def __init__(self, op: Operator): super().__init__(intersect._spec().outputs, op) - self._intersection = Output(intersect._spec().output_pin(0), 0, op) + self._intersection = Output(intersect._spec().output_pin(0), 0, op) self._outputs.append(self._intersection) - self._scopingA_min_intersection = Output(intersect._spec().output_pin(1), 1, op) + self._scopingA_min_intersection = Output(intersect._spec().output_pin(1), 1, op) self._outputs.append(self._scopingA_min_intersection) @property def intersection(self): """Allows to get intersection output of the operator - Returns ---------- - my_intersection : Scoping, + my_intersection : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.intersect() >>> # Connect inputs : op.inputs. ... - >>> result_intersection = op.outputs.intersection() - """ + >>> result_intersection = op.outputs.intersection() + """ # noqa: E501 return self._intersection @property def scopingA_min_intersection(self): """Allows to get scopingA_min_intersection output of the operator - Returns ---------- - my_scopingA_min_intersection : Scoping, + my_scopingA_min_intersection : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.intersect() >>> # Connect inputs : op.inputs. ... 
- >>> result_scopingA_min_intersection = op.outputs.scopingA_min_intersection() - """ + >>> result_scopingA_min_intersection = op.outputs.scopingA_min_intersection() + """ # noqa: E501 return self._scopingA_min_intersection - diff --git a/ansys/dpf/core/operators/scoping/nodal_from_mesh.py b/ansys/dpf/core/operators/scoping/nodal_from_mesh.py index 0c3dee58a85..659754d93d9 100644 --- a/ansys/dpf/core/operators/scoping/nodal_from_mesh.py +++ b/ansys/dpf/core/operators/scoping/nodal_from_mesh.py @@ -1,60 +1,89 @@ """ nodal_from_mesh =============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "scoping" category -""" class nodal_from_mesh(Operator): """Get the nodes ids scoping of an input mesh. - available inputs: - - mesh (MeshedRegion) + Parameters + ---------- + mesh : MeshedRegion + - available outputs: - - mesh_scoping (Scoping) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.scoping.nodal_from_mesh() - >>> # Instantiate operator - >>> op = dpf.operators.scoping.nodal_from_mesh() + >>> # Make input connections + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) - >>> # Make input connections - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.scoping.nodal_from_mesh( + ... mesh=my_mesh, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.scoping.nodal_from_mesh(mesh=my_mesh) + >>> # Get output data + >>> result_mesh_scoping = op.outputs.mesh_scoping() + """ - >>> # Get output data - >>> result_mesh_scoping = op.outputs.mesh_scoping()""" def __init__(self, mesh=None, config=None, server=None): - super().__init__(name="GetNodeScopingFromMesh", config = config, server = server) + super().__init__(name="GetNodeScopingFromMesh", config=config, server=server) self._inputs = InputsNodalFromMesh(self) self._outputs = OutputsNodalFromMesh(self) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - spec = Specification(description="""Get the nodes ids scoping of an input mesh.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "mesh_scoping", type_names=["scoping"], optional=False, document="""""")}) + description = """Get the nodes ids scoping of an input mesh.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "GetNodeScopingFromMesh") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. 
When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="GetNodeScopingFromMesh", server=server) @property def inputs(self): @@ -62,91 +91,87 @@ def inputs(self): Returns -------- - inputs : InputsNodalFromMesh + inputs : InputsNodalFromMesh """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsNodalFromMesh + outputs : OutputsNodalFromMesh """ return super().outputs -#internal name: GetNodeScopingFromMesh -#scripting name: nodal_from_mesh class InputsNodalFromMesh(_Inputs): - """Intermediate class used to connect user inputs to nodal_from_mesh operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.nodal_from_mesh() - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) + """Intermediate class used to connect user inputs to + nodal_from_mesh operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.nodal_from_mesh() + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ + def __init__(self, op: Operator): super().__init__(nodal_from_mesh._spec().inputs, op) - self._mesh = Input(nodal_from_mesh._spec().input_pin(0), 0, op, -1) + self._mesh = Input(nodal_from_mesh._spec().input_pin(0), 0, op, -1) self._inputs.append(self._mesh) @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.nodal_from_mesh() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh + class OutputsNodalFromMesh(_Outputs): - """Intermediate class used to get outputs from nodal_from_mesh operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.nodal_from_mesh() - >>> # Connect inputs : op.inputs. ... - >>> result_mesh_scoping = op.outputs.mesh_scoping() + """Intermediate class used to get outputs from + nodal_from_mesh operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.nodal_from_mesh() + >>> # Connect inputs : op.inputs. ... + >>> result_mesh_scoping = op.outputs.mesh_scoping() """ + def __init__(self, op: Operator): super().__init__(nodal_from_mesh._spec().outputs, op) - self._mesh_scoping = Output(nodal_from_mesh._spec().output_pin(0), 0, op) + self._mesh_scoping = Output(nodal_from_mesh._spec().output_pin(0), 0, op) self._outputs.append(self._mesh_scoping) @property def mesh_scoping(self): """Allows to get mesh_scoping output of the operator - Returns ---------- - my_mesh_scoping : Scoping, + my_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.nodal_from_mesh() >>> # Connect inputs : op.inputs. ... - >>> result_mesh_scoping = op.outputs.mesh_scoping() - """ + >>> result_mesh_scoping = op.outputs.mesh_scoping() + """ # noqa: E501 return self._mesh_scoping - diff --git a/ansys/dpf/core/operators/scoping/on_mesh_property.py b/ansys/dpf/core/operators/scoping/on_mesh_property.py new file mode 100644 index 00000000000..5695ad5ab37 --- /dev/null +++ b/ansys/dpf/core/operators/scoping/on_mesh_property.py @@ -0,0 +1,369 @@ +""" +on_mesh_property +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class on_mesh_property(Operator): + """Provides a scoping on a given property name and a property number. + + Parameters + ---------- + requested_location : str, optional + Nodal or elemental location are expected + property_name : str + Ex "apdl_element_type", "elprops", "mat", + "eltype", "connectivity", + "shell_elements", "solid_elements", + "skin_elements", "beam_elements", + "point_elements"... + property_id : int, optional + inclusive : int, optional + If element scoping is requested on a nodal + named selection, if inclusive == 1 + then all the elements adjacent to the + nodes ids in input are added, if + inclusive == 0, only the elements + which have all their nodes in the + scoping are included + mesh : MeshedRegion + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.scoping.on_mesh_property() + + >>> # Make input connections + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_property_name = str() + >>> op.inputs.property_name.connect(my_property_name) + >>> my_property_id = int() + >>> op.inputs.property_id.connect(my_property_id) + >>> my_inclusive = int() + >>> op.inputs.inclusive.connect(my_inclusive) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.scoping.on_mesh_property( + ... requested_location=my_requested_location, + ... property_name=my_property_name, + ... property_id=my_property_id, + ... inclusive=my_inclusive, + ... mesh=my_mesh, + ... 
) + + >>> # Get output data + >>> result_mesh_scoping = op.outputs.mesh_scoping() + """ + + def __init__( + self, + requested_location=None, + property_name=None, + property_id=None, + inclusive=None, + mesh=None, + config=None, + server=None, + ): + super().__init__( + name="meshscoping_provider_by_prop", config=config, server=server + ) + self._inputs = InputsOnMeshProperty(self) + self._outputs = OutputsOnMeshProperty(self) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if property_name is not None: + self.inputs.property_name.connect(property_name) + if property_id is not None: + self.inputs.property_id.connect(property_id) + if inclusive is not None: + self.inputs.inclusive.connect(inclusive) + if mesh is not None: + self.inputs.mesh.connect(mesh) + + @staticmethod + def _spec(): + description = ( + """Provides a scoping on a given property name and a property number.""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="requested_location", + type_names=["string"], + optional=True, + document="""Nodal or elemental location are expected""", + ), + 1: PinSpecification( + name="property_name", + type_names=["string"], + optional=False, + document="""Ex "apdl_element_type", "elprops", "mat", + "eltype", "connectivity", + "shell_elements", "solid_elements", + "skin_elements", "beam_elements", + "point_elements"...""", + ), + 2: PinSpecification( + name="property_id", + type_names=["int32"], + optional=True, + document="""""", + ), + 5: PinSpecification( + name="inclusive", + type_names=["int32"], + optional=True, + document="""If element scoping is requested on a nodal + named selection, if inclusive == 1 + then all the elements adjacent to the + nodes ids in input are added, if + inclusive == 0, only the elements + which have all their nodes in the + scoping are included""", + ), + 7: PinSpecification( + name="mesh", + 
type_names=["abstract_meshed_region"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=False, + document="""Scoping""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config( + name="meshscoping_provider_by_prop", server=server + ) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsOnMeshProperty + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsOnMeshProperty + """ + return super().outputs + + +class InputsOnMeshProperty(_Inputs): + """Intermediate class used to connect user inputs to + on_mesh_property operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.on_mesh_property() + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_property_name = str() + >>> op.inputs.property_name.connect(my_property_name) + >>> my_property_id = int() + >>> op.inputs.property_id.connect(my_property_id) + >>> my_inclusive = int() + >>> op.inputs.inclusive.connect(my_inclusive) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + """ + + def __init__(self, op: Operator): + super().__init__(on_mesh_property._spec().inputs, op) + self._requested_location = Input( + on_mesh_property._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._requested_location) + self._property_name = Input(on_mesh_property._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._property_name) + self._property_id = Input(on_mesh_property._spec().input_pin(2), 2, op, -1) + self._inputs.append(self._property_id) + self._inclusive = Input(on_mesh_property._spec().input_pin(5), 5, op, -1) + self._inputs.append(self._inclusive) + self._mesh = Input(on_mesh_property._spec().input_pin(7), 7, op, -1) + self._inputs.append(self._mesh) + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Nodal or elemental location are expected + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.on_mesh_property() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def property_name(self): + """Allows to connect property_name input to the operator. 
+ + Ex "apdl_element_type", "elprops", "mat", + "eltype", "connectivity", + "shell_elements", "solid_elements", + "skin_elements", "beam_elements", + "point_elements"... + + Parameters + ---------- + my_property_name : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.on_mesh_property() + >>> op.inputs.property_name.connect(my_property_name) + >>> # or + >>> op.inputs.property_name(my_property_name) + """ + return self._property_name + + @property + def property_id(self): + """Allows to connect property_id input to the operator. + + Parameters + ---------- + my_property_id : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.on_mesh_property() + >>> op.inputs.property_id.connect(my_property_id) + >>> # or + >>> op.inputs.property_id(my_property_id) + """ + return self._property_id + + @property + def inclusive(self): + """Allows to connect inclusive input to the operator. + + If element scoping is requested on a nodal + named selection, if inclusive == 1 + then all the elements adjacent to the + nodes ids in input are added, if + inclusive == 0, only the elements + which have all their nodes in the + scoping are included + + Parameters + ---------- + my_inclusive : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.on_mesh_property() + >>> op.inputs.inclusive.connect(my_inclusive) + >>> # or + >>> op.inputs.inclusive(my_inclusive) + """ + return self._inclusive + + @property + def mesh(self): + """Allows to connect mesh input to the operator. 
+ + Parameters + ---------- + my_mesh : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.on_mesh_property() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + +class OutputsOnMeshProperty(_Outputs): + """Intermediate class used to get outputs from + on_mesh_property operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.on_mesh_property() + >>> # Connect inputs : op.inputs. ... + >>> result_mesh_scoping = op.outputs.mesh_scoping() + """ + + def __init__(self, op: Operator): + super().__init__(on_mesh_property._spec().outputs, op) + self._mesh_scoping = Output(on_mesh_property._spec().output_pin(0), 0, op) + self._outputs.append(self._mesh_scoping) + + @property + def mesh_scoping(self): + """Allows to get mesh_scoping output of the operator + + Returns + ---------- + my_mesh_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.on_mesh_property() + >>> # Connect inputs : op.inputs. ... + >>> result_mesh_scoping = op.outputs.mesh_scoping() + """ # noqa: E501 + return self._mesh_scoping diff --git a/ansys/dpf/core/operators/scoping/on_named_selection.py b/ansys/dpf/core/operators/scoping/on_named_selection.py index bfffd4038c1..a14c7ed641c 100644 --- a/ansys/dpf/core/operators/scoping/on_named_selection.py +++ b/ansys/dpf/core/operators/scoping/on_named_selection.py @@ -1,84 +1,160 @@ """ on_named_selection -================== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "scoping" category -""" class on_named_selection(Operator): - """provides a scoping at a given location based on a given named selection - - available inputs: - - requested_location (str) - - named_selection_name (str) - - int_inclusive (int) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - available outputs: - - mesh_scoping (Scoping) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.scoping.on_named_selection() - - >>> # Make input connections - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_named_selection_name = str() - >>> op.inputs.named_selection_name.connect(my_named_selection_name) - >>> my_int_inclusive = int() - >>> op.inputs.int_inclusive.connect(my_int_inclusive) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.scoping.on_named_selection(requested_location=my_requested_location,named_selection_name=my_named_selection_name,int_inclusive=my_int_inclusive,streams_container=my_streams_container,data_sources=my_data_sources) - - >>> # Get output data - >>> result_mesh_scoping = op.outputs.mesh_scoping()""" - def __init__(self, requested_location=None, named_selection_name=None, int_inclusive=None, streams_container=None, data_sources=None, 
config=None, server=None): - super().__init__(name="scoping_provider_by_ns", config = config, server = server) + """provides a scoping at a given location based on a given named + selection + + Parameters + ---------- + requested_location : str + named_selection_name : str + The string is expected to be in upper case + int_inclusive : int, optional + If element scoping is requested on a nodal + named selection, if inclusive == 1 + then add all the elements adjacent to + the nodes.if inclusive == 0, only the + elements which have all their nodes + in the named selection are included + streams_container : StreamsContainer, optional + data_sources : DataSources + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.scoping.on_named_selection() + + >>> # Make input connections + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_named_selection_name = str() + >>> op.inputs.named_selection_name.connect(my_named_selection_name) + >>> my_int_inclusive = int() + >>> op.inputs.int_inclusive.connect(my_int_inclusive) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.scoping.on_named_selection( + ... requested_location=my_requested_location, + ... named_selection_name=my_named_selection_name, + ... int_inclusive=my_int_inclusive, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... 
) + + >>> # Get output data + >>> result_mesh_scoping = op.outputs.mesh_scoping() + """ + + def __init__( + self, + requested_location=None, + named_selection_name=None, + int_inclusive=None, + streams_container=None, + data_sources=None, + config=None, + server=None, + ): + super().__init__(name="scoping_provider_by_ns", config=config, server=server) self._inputs = InputsOnNamedSelection(self) self._outputs = OutputsOnNamedSelection(self) - if requested_location !=None: + if requested_location is not None: self.inputs.requested_location.connect(requested_location) - if named_selection_name !=None: + if named_selection_name is not None: self.inputs.named_selection_name.connect(named_selection_name) - if int_inclusive !=None: + if int_inclusive is not None: self.inputs.int_inclusive.connect(int_inclusive) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) @staticmethod def _spec(): - spec = Specification(description="""provides a scoping at a given location based on a given named selection""", - map_input_pin_spec={ - 0 : PinSpecification(name = "requested_location", type_names=["string"], optional=False, document=""""""), - 1 : PinSpecification(name = "named_selection_name", type_names=["string"], optional=False, document="""the string is expected to be in upper case"""), - 2 : PinSpecification(name = "int_inclusive", type_names=["int32"], optional=True, document="""If element scoping is requested on a nodal named selection, if Inclusive == 1 then add all the elements adjacent to the nodes.If Inclusive == 0, only the elements which have all their nodes in the named selection are included"""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document=""""""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], 
optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "mesh_scoping", type_names=["scoping"], optional=False, document="""""")}) + description = """provides a scoping at a given location based on a given named + selection""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="requested_location", + type_names=["string"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="named_selection_name", + type_names=["string"], + optional=False, + document="""The string is expected to be in upper case""", + ), + 2: PinSpecification( + name="int_inclusive", + type_names=["int32"], + optional=True, + document="""If element scoping is requested on a nodal + named selection, if inclusive == 1 + then add all the elements adjacent to + the nodes.if inclusive == 0, only the + elements which have all their nodes + in the named selection are included""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "scoping_provider_by_ns") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="scoping_provider_by_ns", server=server) @property def inputs(self): @@ -86,191 +162,190 @@ def inputs(self): Returns -------- - inputs : InputsOnNamedSelection + inputs : InputsOnNamedSelection """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsOnNamedSelection + outputs : OutputsOnNamedSelection """ return super().outputs -#internal name: scoping_provider_by_ns -#scripting name: on_named_selection class InputsOnNamedSelection(_Inputs): - """Intermediate class used to connect user inputs to on_named_selection operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.on_named_selection() - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_named_selection_name = str() - >>> op.inputs.named_selection_name.connect(my_named_selection_name) - >>> my_int_inclusive = int() - >>> op.inputs.int_inclusive.connect(my_int_inclusive) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + """Intermediate class used to connect user inputs to + on_named_selection operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.on_named_selection() + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_named_selection_name = str() + >>> op.inputs.named_selection_name.connect(my_named_selection_name) + >>> my_int_inclusive = int() + >>> op.inputs.int_inclusive.connect(my_int_inclusive) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) """ + def __init__(self, op: Operator): super().__init__(on_named_selection._spec().inputs, op) - self._requested_location = Input(on_named_selection._spec().input_pin(0), 0, op, -1) + self._requested_location = Input( + on_named_selection._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._requested_location) - self._named_selection_name = Input(on_named_selection._spec().input_pin(1), 1, op, -1) + self._named_selection_name = Input( + on_named_selection._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._named_selection_name) - self._int_inclusive = Input(on_named_selection._spec().input_pin(2), 2, op, -1) + self._int_inclusive = Input(on_named_selection._spec().input_pin(2), 2, op, -1) self._inputs.append(self._int_inclusive) - self._streams_container = Input(on_named_selection._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + on_named_selection._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(on_named_selection._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(on_named_selection._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. 
Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.on_named_selection() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def named_selection_name(self): - """Allows to connect named_selection_name input to the operator + """Allows to connect named_selection_name input to the operator. - - pindoc: the string is expected to be in upper case + The string is expected to be in upper case Parameters ---------- - my_named_selection_name : str, + my_named_selection_name : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.on_named_selection() >>> op.inputs.named_selection_name.connect(my_named_selection_name) - >>> #or + >>> # or >>> op.inputs.named_selection_name(my_named_selection_name) - """ return self._named_selection_name @property def int_inclusive(self): - """Allows to connect int_inclusive input to the operator + """Allows to connect int_inclusive input to the operator. 
- - pindoc: If element scoping is requested on a nodal named selection, if Inclusive == 1 then add all the elements adjacent to the nodes.If Inclusive == 0, only the elements which have all their nodes in the named selection are included + If element scoping is requested on a nodal + named selection, if inclusive == 1 + then add all the elements adjacent to + the nodes.if inclusive == 0, only the + elements which have all their nodes + in the named selection are included Parameters ---------- - my_int_inclusive : int, + my_int_inclusive : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.on_named_selection() >>> op.inputs.int_inclusive.connect(my_int_inclusive) - >>> #or + >>> # or >>> op.inputs.int_inclusive(my_int_inclusive) - """ return self._int_inclusive @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.on_named_selection() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.on_named_selection() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources + class OutputsOnNamedSelection(_Outputs): - """Intermediate class used to get outputs from on_named_selection operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.on_named_selection() - >>> # Connect inputs : op.inputs. ... - >>> result_mesh_scoping = op.outputs.mesh_scoping() + """Intermediate class used to get outputs from + on_named_selection operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.on_named_selection() + >>> # Connect inputs : op.inputs. ... + >>> result_mesh_scoping = op.outputs.mesh_scoping() """ + def __init__(self, op: Operator): super().__init__(on_named_selection._spec().outputs, op) - self._mesh_scoping = Output(on_named_selection._spec().output_pin(0), 0, op) + self._mesh_scoping = Output(on_named_selection._spec().output_pin(0), 0, op) self._outputs.append(self._mesh_scoping) @property def mesh_scoping(self): """Allows to get mesh_scoping output of the operator - Returns ---------- - my_mesh_scoping : Scoping, + my_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.on_named_selection() >>> # Connect inputs : op.inputs. ... 
- >>> result_mesh_scoping = op.outputs.mesh_scoping() - """ + >>> result_mesh_scoping = op.outputs.mesh_scoping() + """ # noqa: E501 return self._mesh_scoping - diff --git a/ansys/dpf/core/operators/scoping/on_property.py b/ansys/dpf/core/operators/scoping/on_property.py index d0678bd4257..d7eab45a7c1 100644 --- a/ansys/dpf/core/operators/scoping/on_property.py +++ b/ansys/dpf/core/operators/scoping/on_property.py @@ -1,90 +1,182 @@ """ on_property -=========== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "scoping" category -""" class on_property(Operator): - """Provides a scoping at a given location based on a given property name and a property number. 
- - available inputs: - - requested_location (str) - - property_name (str) - - property_id (int) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) - - inclusive (int) (optional) - - available outputs: - - mesh_scoping (Scoping) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.scoping.on_property() - - >>> # Make input connections - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_property_name = str() - >>> op.inputs.property_name.connect(my_property_name) - >>> my_property_id = int() - >>> op.inputs.property_id.connect(my_property_id) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_inclusive = int() - >>> op.inputs.inclusive.connect(my_inclusive) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.scoping.on_property(requested_location=my_requested_location,property_name=my_property_name,property_id=my_property_id,streams_container=my_streams_container,data_sources=my_data_sources,inclusive=my_inclusive) - - >>> # Get output data - >>> result_mesh_scoping = op.outputs.mesh_scoping()""" - def __init__(self, requested_location=None, property_name=None, property_id=None, streams_container=None, data_sources=None, inclusive=None, config=None, server=None): - super().__init__(name="scoping_provider_by_prop", config = config, server = server) + """Provides a scoping at a given location based on a given property name + and a property number. + + Parameters + ---------- + requested_location : str + Nodal or elemental location are expected + property_name : str + Ex "mapdl_element_type", "apdl_type_index", + "mapdl_type_id", "material", + "apdl_section_id", "apdl_real_id", + "shell_axi", "volume_axi"... 
+ property_id : int + streams_container : StreamsContainer, optional + data_sources : DataSources + inclusive : int, optional + If element scoping is requested on a nodal + named selection, if inclusive == 1 + then all the elements adjacent to the + nodes ids in input are added, if + inclusive == 0, only the elements + which have all their nodes in the + scoping are included + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.scoping.on_property() + + >>> # Make input connections + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_property_name = str() + >>> op.inputs.property_name.connect(my_property_name) + >>> my_property_id = int() + >>> op.inputs.property_id.connect(my_property_id) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_inclusive = int() + >>> op.inputs.inclusive.connect(my_inclusive) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.scoping.on_property( + ... requested_location=my_requested_location, + ... property_name=my_property_name, + ... property_id=my_property_id, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... inclusive=my_inclusive, + ... 
) + + >>> # Get output data + >>> result_mesh_scoping = op.outputs.mesh_scoping() + """ + + def __init__( + self, + requested_location=None, + property_name=None, + property_id=None, + streams_container=None, + data_sources=None, + inclusive=None, + config=None, + server=None, + ): + super().__init__(name="scoping_provider_by_prop", config=config, server=server) self._inputs = InputsOnProperty(self) self._outputs = OutputsOnProperty(self) - if requested_location !=None: + if requested_location is not None: self.inputs.requested_location.connect(requested_location) - if property_name !=None: + if property_name is not None: self.inputs.property_name.connect(property_name) - if property_id !=None: + if property_id is not None: self.inputs.property_id.connect(property_id) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if inclusive !=None: + if inclusive is not None: self.inputs.inclusive.connect(inclusive) @staticmethod def _spec(): - spec = Specification(description="""Provides a scoping at a given location based on a given property name and a property number.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "requested_location", type_names=["string"], optional=False, document="""Nodal or Elemental location are expected"""), - 1 : PinSpecification(name = "property_name", type_names=["string"], optional=False, document="""ex "mapdl_element_type", "apdl_type_index", "mapdl_type_id", "material", "apdl_section_id", "apdl_real_id", "shell_axi", "volume_axi"..."""), - 2 : PinSpecification(name = "property_id", type_names=["int32"], optional=False, document=""""""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], optional=True, document=""""""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, 
document=""""""), - 5 : PinSpecification(name = "inclusive", type_names=["int32"], optional=True, document="""If element scoping is requested on a nodal named selection, if inclusive == 1 then all the elements adjacent to the nodes ids in input are added, if inclusive == 0, only the elements which have all their nodes in the scoping are included""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "mesh_scoping", type_names=["scoping"], optional=False, document="""Scoping""")}) + description = """Provides a scoping at a given location based on a given property name + and a property number.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="requested_location", + type_names=["string"], + optional=False, + document="""Nodal or elemental location are expected""", + ), + 1: PinSpecification( + name="property_name", + type_names=["string"], + optional=False, + document="""Ex "mapdl_element_type", "apdl_type_index", + "mapdl_type_id", "material", + "apdl_section_id", "apdl_real_id", + "shell_axi", "volume_axi"...""", + ), + 2: PinSpecification( + name="property_id", + type_names=["int32"], + optional=False, + document="""""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""""", + ), + 5: PinSpecification( + name="inclusive", + type_names=["int32"], + optional=True, + document="""If element scoping is requested on a nodal + named selection, if inclusive == 1 + then all the elements adjacent to the + nodes ids in input are added, if + inclusive == 0, only the elements + which have all their nodes in the + scoping are included""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=False, + document="""Scoping""", + ), + }, + ) return spec - @staticmethod - 
def default_config(): - return Operator.default_config(name = "scoping_provider_by_prop") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="scoping_provider_by_prop", server=server) @property def inputs(self): @@ -92,219 +184,212 @@ def inputs(self): Returns -------- - inputs : InputsOnProperty + inputs : InputsOnProperty """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsOnProperty + outputs : OutputsOnProperty """ return super().outputs -#internal name: scoping_provider_by_prop -#scripting name: on_property class InputsOnProperty(_Inputs): - """Intermediate class used to connect user inputs to on_property operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.on_property() - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_property_name = str() - >>> op.inputs.property_name.connect(my_property_name) - >>> my_property_id = int() - >>> op.inputs.property_id.connect(my_property_id) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_inclusive = int() - >>> op.inputs.inclusive.connect(my_inclusive) + """Intermediate class used to connect user inputs to + on_property operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.on_property() + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_property_name = str() + >>> op.inputs.property_name.connect(my_property_name) + >>> my_property_id = int() + >>> op.inputs.property_id.connect(my_property_id) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_inclusive = int() + >>> op.inputs.inclusive.connect(my_inclusive) """ + def __init__(self, op: Operator): super().__init__(on_property._spec().inputs, op) - self._requested_location = Input(on_property._spec().input_pin(0), 0, op, -1) + self._requested_location = Input(on_property._spec().input_pin(0), 0, op, -1) self._inputs.append(self._requested_location) - self._property_name = Input(on_property._spec().input_pin(1), 1, op, -1) + self._property_name = Input(on_property._spec().input_pin(1), 1, op, -1) self._inputs.append(self._property_name) - self._property_id = Input(on_property._spec().input_pin(2), 2, op, -1) + self._property_id = Input(on_property._spec().input_pin(2), 2, op, -1) self._inputs.append(self._property_id) - self._streams_container = Input(on_property._spec().input_pin(3), 3, op, -1) + self._streams_container = Input(on_property._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams_container) - self._data_sources = Input(on_property._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(on_property._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) - self._inclusive = Input(on_property._spec().input_pin(5), 5, op, -1) + self._inclusive = Input(on_property._spec().input_pin(5), 5, op, -1) self._inputs.append(self._inclusive) @property def requested_location(self): - """Allows to connect requested_location input to 
the operator + """Allows to connect requested_location input to the operator. - - pindoc: Nodal or Elemental location are expected + Nodal or elemental location are expected Parameters ---------- - my_requested_location : str, + my_requested_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.on_property() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location @property def property_name(self): - """Allows to connect property_name input to the operator + """Allows to connect property_name input to the operator. - - pindoc: ex "mapdl_element_type", "apdl_type_index", "mapdl_type_id", "material", "apdl_section_id", "apdl_real_id", "shell_axi", "volume_axi"... + Ex "mapdl_element_type", "apdl_type_index", + "mapdl_type_id", "material", + "apdl_section_id", "apdl_real_id", + "shell_axi", "volume_axi"... Parameters ---------- - my_property_name : str, + my_property_name : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.on_property() >>> op.inputs.property_name.connect(my_property_name) - >>> #or + >>> # or >>> op.inputs.property_name(my_property_name) - """ return self._property_name @property def property_id(self): - """Allows to connect property_id input to the operator + """Allows to connect property_id input to the operator. Parameters ---------- - my_property_id : int, + my_property_id : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.on_property() >>> op.inputs.property_id.connect(my_property_id) - >>> #or + >>> # or >>> op.inputs.property_id(my_property_id) - """ return self._property_id @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.on_property() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.on_property() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def inclusive(self): - """Allows to connect inclusive input to the operator + """Allows to connect inclusive input to the operator. 
- - pindoc: If element scoping is requested on a nodal named selection, if inclusive == 1 then all the elements adjacent to the nodes ids in input are added, if inclusive == 0, only the elements which have all their nodes in the scoping are included + If element scoping is requested on a nodal + named selection, if inclusive == 1 + then all the elements adjacent to the + nodes ids in input are added, if + inclusive == 0, only the elements + which have all their nodes in the + scoping are included Parameters ---------- - my_inclusive : int, + my_inclusive : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.on_property() >>> op.inputs.inclusive.connect(my_inclusive) - >>> #or + >>> # or >>> op.inputs.inclusive(my_inclusive) - """ return self._inclusive + class OutputsOnProperty(_Outputs): - """Intermediate class used to get outputs from on_property operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.on_property() - >>> # Connect inputs : op.inputs. ... - >>> result_mesh_scoping = op.outputs.mesh_scoping() + """Intermediate class used to get outputs from + on_property operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.on_property() + >>> # Connect inputs : op.inputs. ... + >>> result_mesh_scoping = op.outputs.mesh_scoping() """ + def __init__(self, op: Operator): super().__init__(on_property._spec().outputs, op) - self._mesh_scoping = Output(on_property._spec().output_pin(0), 0, op) + self._mesh_scoping = Output(on_property._spec().output_pin(0), 0, op) self._outputs.append(self._mesh_scoping) @property def mesh_scoping(self): """Allows to get mesh_scoping output of the operator - - - pindoc: Scoping - Returns ---------- - my_mesh_scoping : Scoping, + my_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.on_property() >>> # Connect inputs : op.inputs. ... 
- >>> result_mesh_scoping = op.outputs.mesh_scoping() - """ + >>> result_mesh_scoping = op.outputs.mesh_scoping() + """ # noqa: E501 return self._mesh_scoping - diff --git a/ansys/dpf/core/operators/scoping/rescope.py b/ansys/dpf/core/operators/scoping/rescope.py index 0c7a7b4a7da..4666e824553 100644 --- a/ansys/dpf/core/operators/scoping/rescope.py +++ b/ansys/dpf/core/operators/scoping/rescope.py @@ -1,72 +1,128 @@ """ rescope -======= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.outputs import _modify_output_spec_with_one_type from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "scoping" category -""" class rescope(Operator): - """Rescope a field on the given scoping. If an id does not exists in the original field, default value (in 2) is used if defined. 
- - available inputs: - - fields (FieldsContainer, Field) - - mesh_scoping (Scoping, list) - - default_value (float, list) - - available outputs: - - fields (FieldsContainer ,Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.scoping.rescope() - - >>> # Make input connections - >>> my_fields = dpf.FieldsContainer() - >>> op.inputs.fields.connect(my_fields) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_default_value = float() - >>> op.inputs.default_value.connect(my_default_value) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.scoping.rescope(fields=my_fields,mesh_scoping=my_mesh_scoping,default_value=my_default_value) - - >>> # Get output data - >>> result_fields = op.outputs.fields()""" - def __init__(self, fields=None, mesh_scoping=None, default_value=None, config=None, server=None): - super().__init__(name="Rescope", config = config, server = server) + """Rescope a field on the given scoping. If an id does not exists in the + original field, default value (in 2) is used if defined. + + Parameters + ---------- + fields : FieldsContainer or Field + mesh_scoping : Scoping + default_value : float + If a the pin 2 is used, the ids not found in + the fields are added with this + default value + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.scoping.rescope() + + >>> # Make input connections + >>> my_fields = dpf.FieldsContainer() + >>> op.inputs.fields.connect(my_fields) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_default_value = float() + >>> op.inputs.default_value.connect(my_default_value) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.scoping.rescope( + ... fields=my_fields, + ... mesh_scoping=my_mesh_scoping, + ... 
default_value=my_default_value, + ... ) + + >>> # Get output data + >>> result_fields = op.outputs.fields() + """ + + def __init__( + self, + fields=None, + mesh_scoping=None, + default_value=None, + config=None, + server=None, + ): + super().__init__(name="Rescope", config=config, server=server) self._inputs = InputsRescope(self) self._outputs = OutputsRescope(self) - if fields !=None: + if fields is not None: self.inputs.fields.connect(fields) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if default_value !=None: + if default_value is not None: self.inputs.default_value.connect(default_value) @staticmethod def _spec(): - spec = Specification(description="""Rescope a field on the given scoping. If an id does not exists in the original field, default value (in 2) is used if defined.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields", type_names=["fields_container","field"], optional=False, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scoping","vector"], optional=False, document=""""""), - 2 : PinSpecification(name = "default_value", type_names=["double","vector"], optional=False, document="""if a the pin 2 is used, the ids not found in the fields are added with this default value""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields", type_names=["fields_container","field"], optional=False, document="""""")}) + description = """Rescope a field on the given scoping. 
If an id does not exists in the + original field, default value (in 2) is used if defined.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields", + type_names=["fields_container", "field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scoping", "vector"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="default_value", + type_names=["double", "vector"], + optional=False, + document="""If a the pin 2 is used, the ids not found in + the fields are added with this + default value""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields", + type_names=["fields_container", "field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "Rescope") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="Rescope", server=server) @property def inputs(self): @@ -74,124 +130,130 @@ def inputs(self): Returns -------- - inputs : InputsRescope + inputs : InputsRescope """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsRescope + outputs : OutputsRescope """ return super().outputs -#internal name: Rescope -#scripting name: rescope class InputsRescope(_Inputs): - """Intermediate class used to connect user inputs to rescope operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.rescope() - >>> my_fields = dpf.FieldsContainer() - >>> op.inputs.fields.connect(my_fields) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_default_value = float() - >>> op.inputs.default_value.connect(my_default_value) + """Intermediate class used to connect user inputs to + rescope operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.rescope() + >>> my_fields = dpf.FieldsContainer() + >>> op.inputs.fields.connect(my_fields) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_default_value = float() + >>> op.inputs.default_value.connect(my_default_value) """ + def __init__(self, op: Operator): super().__init__(rescope._spec().inputs, op) - self._fields = Input(rescope._spec().input_pin(0), 0, op, -1) + self._fields = Input(rescope._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields) - self._mesh_scoping = Input(rescope._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(rescope._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._default_value = Input(rescope._spec().input_pin(2), 2, op, -1) + self._default_value = Input(rescope._spec().input_pin(2), 2, op, -1) self._inputs.append(self._default_value) @property def fields(self): - """Allows to connect fields input to the operator + """Allows to connect fields input to the operator. Parameters ---------- - my_fields : FieldsContainer, Field, + my_fields : FieldsContainer or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.rescope() >>> op.inputs.fields.connect(my_fields) - >>> #or + >>> # or >>> op.inputs.fields(my_fields) - """ return self._fields @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. 
Parameters ---------- - my_mesh_scoping : Scoping, list, + my_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.rescope() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def default_value(self): - """Allows to connect default_value input to the operator + """Allows to connect default_value input to the operator. - - pindoc: if a the pin 2 is used, the ids not found in the fields are added with this default value + If a the pin 2 is used, the ids not found in + the fields are added with this + default value Parameters ---------- - my_default_value : float, list, + my_default_value : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.rescope() >>> op.inputs.default_value.connect(my_default_value) - >>> #or + >>> # or >>> op.inputs.default_value(my_default_value) - """ return self._default_value + class OutputsRescope(_Outputs): - """Intermediate class used to get outputs from rescope operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.rescope() - >>> # Connect inputs : op.inputs. ... - >>> result_fields = op.outputs.fields() + """Intermediate class used to get outputs from + rescope operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.rescope() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields = op.outputs.fields() """ + def __init__(self, op: Operator): super().__init__(rescope._spec().outputs, op) - self.fields_as_fields_container = Output( _modify_output_spec_with_one_type(rescope._spec().output_pin(0), "fields_container"), 0, op) + self.fields_as_fields_container = Output( + _modify_output_spec_with_one_type( + rescope._spec().output_pin(0), "fields_container" + ), + 0, + op, + ) self._outputs.append(self.fields_as_fields_container) - self.fields_as_field = Output( _modify_output_spec_with_one_type(rescope._spec().output_pin(0), "field"), 0, op) + self.fields_as_field = Output( + _modify_output_spec_with_one_type(rescope._spec().output_pin(0), "field"), + 0, + op, + ) self._outputs.append(self.fields_as_field) - diff --git a/ansys/dpf/core/operators/scoping/rescope_fc.py b/ansys/dpf/core/operators/scoping/rescope_fc.py index 9a802a41ad4..61e6159808b 100644 --- a/ansys/dpf/core/operators/scoping/rescope_fc.py +++ b/ansys/dpf/core/operators/scoping/rescope_fc.py @@ -1,72 +1,127 @@ """ rescope_fc -========== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "scoping" category -""" class rescope_fc(Operator): - """Rescope a field on the given scoping. If an id does not exists in the original field, default value (in 2) is used if defined. 
- - available inputs: - - fields_container (FieldsContainer) - - mesh_scoping (Scoping, list) - - default_value (float, list) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.scoping.rescope_fc() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_default_value = float() - >>> op.inputs.default_value.connect(my_default_value) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.scoping.rescope_fc(fields_container=my_fields_container,mesh_scoping=my_mesh_scoping,default_value=my_default_value) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, mesh_scoping=None, default_value=None, config=None, server=None): - super().__init__(name="Rescope_fc", config = config, server = server) + """Rescope a field on the given scoping. If an id does not exists in the + original field, default value (in 2) is used if defined. 
+ + Parameters + ---------- + fields_container : FieldsContainer + mesh_scoping : Scoping + default_value : float + If a the pin 2 is used, the ids not found in + the fields are added with this + default value + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.scoping.rescope_fc() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_default_value = float() + >>> op.inputs.default_value.connect(my_default_value) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.scoping.rescope_fc( + ... fields_container=my_fields_container, + ... mesh_scoping=my_mesh_scoping, + ... default_value=my_default_value, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + fields_container=None, + mesh_scoping=None, + default_value=None, + config=None, + server=None, + ): + super().__init__(name="Rescope_fc", config=config, server=server) self._inputs = InputsRescopeFc(self) self._outputs = OutputsRescopeFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if default_value !=None: + if default_value is not None: self.inputs.default_value.connect(default_value) @staticmethod def _spec(): - spec = Specification(description="""Rescope a field on the given scoping. 
If an id does not exists in the original field, default value (in 2) is used if defined.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "mesh_scoping", type_names=["scoping","vector"], optional=False, document=""""""), - 2 : PinSpecification(name = "default_value", type_names=["double","vector"], optional=False, document="""if a the pin 2 is used, the ids not found in the fields are added with this default value""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Rescope a field on the given scoping. If an id does not exists in the + original field, default value (in 2) is used if defined.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="mesh_scoping", + type_names=["scoping", "vector"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="default_value", + type_names=["double", "vector"], + optional=False, + document="""If a the pin 2 is used, the ids not found in + the fields are added with this + default value""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "Rescope_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="Rescope_fc", server=server) @property def inputs(self): @@ -74,141 +129,135 @@ def inputs(self): Returns -------- - inputs : InputsRescopeFc + inputs : InputsRescopeFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsRescopeFc + outputs : OutputsRescopeFc """ return super().outputs -#internal name: Rescope_fc -#scripting name: rescope_fc class InputsRescopeFc(_Inputs): - """Intermediate class used to connect user inputs to rescope_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.rescope_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_default_value = float() - >>> op.inputs.default_value.connect(my_default_value) + """Intermediate class used to connect user inputs to + rescope_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.rescope_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_default_value = float() + >>> op.inputs.default_value.connect(my_default_value) """ + def __init__(self, op: Operator): super().__init__(rescope_fc._spec().inputs, op) - self._fields_container = Input(rescope_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(rescope_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) - self._mesh_scoping = Input(rescope_fc._spec().input_pin(1), 1, op, -1) + self._mesh_scoping = Input(rescope_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh_scoping) - self._default_value = Input(rescope_fc._spec().input_pin(2), 2, op, -1) + self._default_value = Input(rescope_fc._spec().input_pin(2), 2, op, -1) self._inputs.append(self._default_value) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.rescope_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. 
Parameters ---------- - my_mesh_scoping : Scoping, list, + my_mesh_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.rescope_fc() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def default_value(self): - """Allows to connect default_value input to the operator + """Allows to connect default_value input to the operator. - - pindoc: if a the pin 2 is used, the ids not found in the fields are added with this default value + If a the pin 2 is used, the ids not found in + the fields are added with this + default value Parameters ---------- - my_default_value : float, list, + my_default_value : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.rescope_fc() >>> op.inputs.default_value.connect(my_default_value) - >>> #or + >>> # or >>> op.inputs.default_value(my_default_value) - """ return self._default_value + class OutputsRescopeFc(_Outputs): - """Intermediate class used to get outputs from rescope_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.rescope_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + rescope_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.rescope_fc() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(rescope_fc._spec().outputs, op) - self._fields_container = Output(rescope_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(rescope_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.rescope_fc() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/scoping/split_on_property_type.py b/ansys/dpf/core/operators/scoping/split_on_property_type.py new file mode 100644 index 00000000000..91aec69011e --- /dev/null +++ b/ansys/dpf/core/operators/scoping/split_on_property_type.py @@ -0,0 +1,357 @@ +""" +split_on_property_type +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class split_on_property_type(Operator): + """Splits a given scoping or the mesh scoping (nodal or elemental) on + given properties (elshape and/or material) and returns a scopings + container with those split scopings. 
+ + Parameters + ---------- + mesh_scoping : Scoping, optional + Scoping + mesh : MeshedRegion + Mesh region + requested_location : str + Location (default is elemental) + label1 : str, optional + Properties to apply the filtering 'mat' + and/or 'elshape' (default is + 'elshape') + label2 : str, optional + Properties to apply the filtering 'mat' + and/or 'elshape' (default is + 'elshape') + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.scoping.split_on_property_type() + + >>> # Make input connections + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_label1 = str() + >>> op.inputs.label1.connect(my_label1) + >>> my_label2 = str() + >>> op.inputs.label2.connect(my_label2) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.scoping.split_on_property_type( + ... mesh_scoping=my_mesh_scoping, + ... mesh=my_mesh, + ... requested_location=my_requested_location, + ... label1=my_label1, + ... label2=my_label2, + ... 
) + + >>> # Get output data + >>> result_mesh_scoping = op.outputs.mesh_scoping() + """ + + def __init__( + self, + mesh_scoping=None, + mesh=None, + requested_location=None, + label1=None, + label2=None, + config=None, + server=None, + ): + super().__init__(name="scoping::by_property", config=config, server=server) + self._inputs = InputsSplitOnPropertyType(self) + self._outputs = OutputsSplitOnPropertyType(self) + if mesh_scoping is not None: + self.inputs.mesh_scoping.connect(mesh_scoping) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if requested_location is not None: + self.inputs.requested_location.connect(requested_location) + if label1 is not None: + self.inputs.label1.connect(label1) + if label2 is not None: + self.inputs.label2.connect(label2) + + @staticmethod + def _spec(): + description = """Splits a given scoping or the mesh scoping (nodal or elemental) on + given properties (elshape and/or material) and returns a + scopings container with those split scopings.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 1: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=True, + document="""Scoping""", + ), + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=False, + document="""Mesh region""", + ), + 9: PinSpecification( + name="requested_location", + type_names=["string"], + optional=False, + document="""Location (default is elemental)""", + ), + 13: PinSpecification( + name="label", + type_names=["string"], + optional=True, + document="""Properties to apply the filtering 'mat' + and/or 'elshape' (default is + 'elshape')""", + ), + 14: PinSpecification( + name="label", + type_names=["string"], + optional=True, + document="""Properties to apply the filtering 'mat' + and/or 'elshape' (default is + 'elshape')""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="mesh_scoping", + type_names=["scopings_container"], + optional=False, + 
document="""Scoping""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="scoping::by_property", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsSplitOnPropertyType + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsSplitOnPropertyType + """ + return super().outputs + + +class InputsSplitOnPropertyType(_Inputs): + """Intermediate class used to connect user inputs to + split_on_property_type operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.split_on_property_type() + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_label1 = str() + >>> op.inputs.label1.connect(my_label1) + >>> my_label2 = str() + >>> op.inputs.label2.connect(my_label2) + """ + + def __init__(self, op: Operator): + super().__init__(split_on_property_type._spec().inputs, op) + self._mesh_scoping = Input( + split_on_property_type._spec().input_pin(1), 1, op, -1 + ) + self._inputs.append(self._mesh_scoping) + self._mesh = Input(split_on_property_type._spec().input_pin(7), 7, op, -1) + self._inputs.append(self._mesh) + self._requested_location = Input( + split_on_property_type._spec().input_pin(9), 9, op, -1 + ) + self._inputs.append(self._requested_location) + self._label1 = Input(split_on_property_type._spec().input_pin(13), 13, op, 0) + self._inputs.append(self._label1) + self._label2 = Input(split_on_property_type._spec().input_pin(14), 14, op, 1) + self._inputs.append(self._label2) + + @property + def mesh_scoping(self): + """Allows to connect mesh_scoping input to the operator. + + Scoping + + Parameters + ---------- + my_mesh_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.split_on_property_type() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> # or + >>> op.inputs.mesh_scoping(my_mesh_scoping) + """ + return self._mesh_scoping + + @property + def mesh(self): + """Allows to connect mesh input to the operator. 
+ + Mesh region + + Parameters + ---------- + my_mesh : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.split_on_property_type() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + @property + def requested_location(self): + """Allows to connect requested_location input to the operator. + + Location (default is elemental) + + Parameters + ---------- + my_requested_location : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.split_on_property_type() + >>> op.inputs.requested_location.connect(my_requested_location) + >>> # or + >>> op.inputs.requested_location(my_requested_location) + """ + return self._requested_location + + @property + def label1(self): + """Allows to connect label1 input to the operator. + + Properties to apply the filtering 'mat' + and/or 'elshape' (default is + 'elshape') + + Parameters + ---------- + my_label1 : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.split_on_property_type() + >>> op.inputs.label1.connect(my_label1) + >>> # or + >>> op.inputs.label1(my_label1) + """ + return self._label1 + + @property + def label2(self): + """Allows to connect label2 input to the operator. + + Properties to apply the filtering 'mat' + and/or 'elshape' (default is + 'elshape') + + Parameters + ---------- + my_label2 : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.split_on_property_type() + >>> op.inputs.label2.connect(my_label2) + >>> # or + >>> op.inputs.label2(my_label2) + """ + return self._label2 + + +class OutputsSplitOnPropertyType(_Outputs): + """Intermediate class used to get outputs from + split_on_property_type operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.split_on_property_type() + >>> # Connect inputs : op.inputs. ... + >>> result_mesh_scoping = op.outputs.mesh_scoping() + """ + + def __init__(self, op: Operator): + super().__init__(split_on_property_type._spec().outputs, op) + self._mesh_scoping = Output(split_on_property_type._spec().output_pin(0), 0, op) + self._outputs.append(self._mesh_scoping) + + @property + def mesh_scoping(self): + """Allows to get mesh_scoping output of the operator + + Returns + ---------- + my_mesh_scoping : ScopingsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.split_on_property_type() + >>> # Connect inputs : op.inputs. ... + >>> result_mesh_scoping = op.outputs.mesh_scoping() + """ # noqa: E501 + return self._mesh_scoping diff --git a/ansys/dpf/core/operators/scoping/splitted_on_property_type.py b/ansys/dpf/core/operators/scoping/splitted_on_property_type.py deleted file mode 100644 index 59c3cc2f0b3..00000000000 --- a/ansys/dpf/core/operators/scoping/splitted_on_property_type.py +++ /dev/null @@ -1,284 +0,0 @@ -""" -splitted_on_property_type -========================= -""" -from ansys.dpf.core.dpf_operator import Operator -from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type -from ansys.dpf.core.operators.specification import PinSpecification, Specification - -"""Operators from Ans.Dpf.Native plugin, from "scoping" category -""" - -class splitted_on_property_type(Operator): - """Splits a given scoping or the mesh scoping (nodal or elemental) on given properties (elshape and/or material) and returns a scopings container with those splitted scopings. 
- - available inputs: - - mesh_scoping (Scoping) (optional) - - mesh (MeshedRegion) - - requested_location (str) - - label1 (str) (optional) - - label2 (str) (optional) - - available outputs: - - mesh_scoping (ScopingsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.scoping.splitted_on_property_type() - - >>> # Make input connections - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> my_label1 = str() - >>> op.inputs.label1.connect(my_label1) - >>> my_label2 = str() - >>> op.inputs.label2.connect(my_label2) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.scoping.splitted_on_property_type(mesh_scoping=my_mesh_scoping,mesh=my_mesh,requested_location=my_requested_location,label1=my_label1,label2=my_label2) - - >>> # Get output data - >>> result_mesh_scoping = op.outputs.mesh_scoping()""" - def __init__(self, mesh_scoping=None, mesh=None, requested_location=None, label1=None, label2=None, config=None, server=None): - super().__init__(name="scoping::by_property", config = config, server = server) - self._inputs = InputsSplittedOnPropertyType(self) - self._outputs = OutputsSplittedOnPropertyType(self) - if mesh_scoping !=None: - self.inputs.mesh_scoping.connect(mesh_scoping) - if mesh !=None: - self.inputs.mesh.connect(mesh) - if requested_location !=None: - self.inputs.requested_location.connect(requested_location) - if label1 !=None: - self.inputs.label1.connect(label1) - if label2 !=None: - self.inputs.label2.connect(label2) - - @staticmethod - def _spec(): - spec = Specification(description="""Splits a given scoping or the mesh scoping (nodal or elemental) on given properties (elshape and/or material) and returns a scopings 
container with those splitted scopings.""", - map_input_pin_spec={ - 1 : PinSpecification(name = "mesh_scoping", type_names=["scoping"], optional=True, document="""Scoping"""), - 7 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=False, document="""mesh region"""), - 9 : PinSpecification(name = "requested_location", type_names=["string"], optional=False, document="""location (default is elemental)"""), - 13 : PinSpecification(name = "label", type_names=["string"], optional=True, document="""properties to apply the filtering 'mat' and/or 'elshape' (default is 'elshape)"""), - 14 : PinSpecification(name = "label", type_names=["string"], optional=True, document="""properties to apply the filtering 'mat' and/or 'elshape' (default is 'elshape)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "mesh_scoping", type_names=["scopings_container"], optional=False, document="""Scoping""")}) - return spec - - - @staticmethod - def default_config(): - return Operator.default_config(name = "scoping::by_property") - - @property - def inputs(self): - """Enables to connect inputs to the operator - - Returns - -------- - inputs : InputsSplittedOnPropertyType - """ - return super().inputs - - - @property - def outputs(self): - """Enables to get outputs of the operator by evaluationg it - - Returns - -------- - outputs : OutputsSplittedOnPropertyType - """ - return super().outputs - - -#internal name: scoping::by_property -#scripting name: splitted_on_property_type -class InputsSplittedOnPropertyType(_Inputs): - """Intermediate class used to connect user inputs to splitted_on_property_type operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.splitted_on_property_type() - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_requested_location = str() - >>> 
op.inputs.requested_location.connect(my_requested_location) - >>> my_label1 = str() - >>> op.inputs.label1.connect(my_label1) - >>> my_label2 = str() - >>> op.inputs.label2.connect(my_label2) - """ - def __init__(self, op: Operator): - super().__init__(splitted_on_property_type._spec().inputs, op) - self._mesh_scoping = Input(splitted_on_property_type._spec().input_pin(1), 1, op, -1) - self._inputs.append(self._mesh_scoping) - self._mesh = Input(splitted_on_property_type._spec().input_pin(7), 7, op, -1) - self._inputs.append(self._mesh) - self._requested_location = Input(splitted_on_property_type._spec().input_pin(9), 9, op, -1) - self._inputs.append(self._requested_location) - self._label1 = Input(splitted_on_property_type._spec().input_pin(13), 13, op, 0) - self._inputs.append(self._label1) - self._label2 = Input(splitted_on_property_type._spec().input_pin(14), 14, op, 1) - self._inputs.append(self._label2) - - @property - def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator - - - pindoc: Scoping - - Parameters - ---------- - my_mesh_scoping : Scoping, - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.splitted_on_property_type() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or - >>> op.inputs.mesh_scoping(my_mesh_scoping) - - """ - return self._mesh_scoping - - @property - def mesh(self): - """Allows to connect mesh input to the operator - - - pindoc: mesh region - - Parameters - ---------- - my_mesh : MeshedRegion, - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.splitted_on_property_type() - >>> op.inputs.mesh.connect(my_mesh) - >>> #or - >>> op.inputs.mesh(my_mesh) - - """ - return self._mesh - - @property - def requested_location(self): - """Allows to connect requested_location input to the operator - - - pindoc: location (default is elemental) - - Parameters - ---------- - my_requested_location : str, - - Examples - 
-------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.splitted_on_property_type() - >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or - >>> op.inputs.requested_location(my_requested_location) - - """ - return self._requested_location - - @property - def label1(self): - """Allows to connect label1 input to the operator - - - pindoc: properties to apply the filtering 'mat' and/or 'elshape' (default is 'elshape) - - Parameters - ---------- - my_label1 : str, - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.splitted_on_property_type() - >>> op.inputs.label1.connect(my_label1) - >>> #or - >>> op.inputs.label1(my_label1) - - """ - return self._label1 - - @property - def label2(self): - """Allows to connect label2 input to the operator - - - pindoc: properties to apply the filtering 'mat' and/or 'elshape' (default is 'elshape) - - Parameters - ---------- - my_label2 : str, - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.splitted_on_property_type() - >>> op.inputs.label2.connect(my_label2) - >>> #or - >>> op.inputs.label2(my_label2) - - """ - return self._label2 - -class OutputsSplittedOnPropertyType(_Outputs): - """Intermediate class used to get outputs from splitted_on_property_type operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.splitted_on_property_type() - >>> # Connect inputs : op.inputs. ... 
- >>> result_mesh_scoping = op.outputs.mesh_scoping() - """ - def __init__(self, op: Operator): - super().__init__(splitted_on_property_type._spec().outputs, op) - self._mesh_scoping = Output(splitted_on_property_type._spec().output_pin(0), 0, op) - self._outputs.append(self._mesh_scoping) - - @property - def mesh_scoping(self): - """Allows to get mesh_scoping output of the operator - - - - pindoc: Scoping - - Returns - ---------- - my_mesh_scoping : ScopingsContainer, - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.splitted_on_property_type() - >>> # Connect inputs : op.inputs. ... - >>> result_mesh_scoping = op.outputs.mesh_scoping() - """ - return self._mesh_scoping - diff --git a/ansys/dpf/core/operators/scoping/transpose.py b/ansys/dpf/core/operators/scoping/transpose.py index 8b9551a4e24..502cc0c1c19 100644 --- a/ansys/dpf/core/operators/scoping/transpose.py +++ b/ansys/dpf/core/operators/scoping/transpose.py @@ -1,72 +1,137 @@ """ transpose -========= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.outputs import _modify_output_spec_with_one_type from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "scoping" category -""" class transpose(Operator): - """Transposes the input scoping or scopings container (Elemental --> Nodal, or Nodal ---> Elemental), based on the input mesh region. 
- - available inputs: - - mesh_scoping (Scoping, ScopingsContainer) - - meshed_region (MeshedRegion, MeshesContainer) - - inclusive (int) (optional) - - available outputs: - - mesh_scoping (Scoping ,ScopingsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.scoping.transpose() - - >>> # Make input connections - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_meshed_region = dpf.MeshedRegion() - >>> op.inputs.meshed_region.connect(my_meshed_region) - >>> my_inclusive = int() - >>> op.inputs.inclusive.connect(my_inclusive) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.scoping.transpose(mesh_scoping=my_mesh_scoping,meshed_region=my_meshed_region,inclusive=my_inclusive) - - >>> # Get output data - >>> result_mesh_scoping = op.outputs.mesh_scoping()""" - def __init__(self, mesh_scoping=None, meshed_region=None, inclusive=None, config=None, server=None): - super().__init__(name="transpose_scoping", config = config, server = server) + """Transposes the input scoping or scopings container (Elemental --> + Nodal, or Nodal ---> Elemental), based on the input mesh region. 
+ + Parameters + ---------- + mesh_scoping : Scoping or ScopingsContainer + Scoping or scopings container (the input type + is the output type) + meshed_region : MeshedRegion or MeshesContainer + inclusive : int, optional + If inclusive == 1 then all the elements + adjacent to the nodes ids in input + are added, if inclusive == 0, only + the elements which have all their + nodes in the scoping are included + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.scoping.transpose() + + >>> # Make input connections + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_meshed_region = dpf.MeshedRegion() + >>> op.inputs.meshed_region.connect(my_meshed_region) + >>> my_inclusive = int() + >>> op.inputs.inclusive.connect(my_inclusive) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.scoping.transpose( + ... mesh_scoping=my_mesh_scoping, + ... meshed_region=my_meshed_region, + ... inclusive=my_inclusive, + ... 
) + + >>> # Get output data + >>> result_mesh_scoping = op.outputs.mesh_scoping() + """ + + def __init__( + self, + mesh_scoping=None, + meshed_region=None, + inclusive=None, + config=None, + server=None, + ): + super().__init__(name="transpose_scoping", config=config, server=server) self._inputs = InputsTranspose(self) self._outputs = OutputsTranspose(self) - if mesh_scoping !=None: + if mesh_scoping is not None: self.inputs.mesh_scoping.connect(mesh_scoping) - if meshed_region !=None: + if meshed_region is not None: self.inputs.meshed_region.connect(meshed_region) - if inclusive !=None: + if inclusive is not None: self.inputs.inclusive.connect(inclusive) @staticmethod def _spec(): - spec = Specification(description="""Transposes the input scoping or scopings container (Elemental --> Nodal, or Nodal ---> Elemental), based on the input mesh region.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "mesh_scoping", type_names=["scoping","scopings_container"], optional=False, document="""Scoping or scopings container (the input type is the output type)"""), - 1 : PinSpecification(name = "meshed_region", type_names=["meshed_region","meshes_container"], optional=False, document=""""""), - 2 : PinSpecification(name = "inclusive", type_names=["int32"], optional=True, document="""if inclusive == 1 then all the elements adjacent to the nodes ids in input are added, if inclusive == 0, only the elements which have all their nodes in the scoping are included""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "mesh_scoping", type_names=["scoping","scopings_container"], optional=False, document="""Scoping or scopings container (the input type is the output type)""")}) + description = """Transposes the input scoping or scopings container (Elemental --> + Nodal, or Nodal ---> Elemental), based on the input mesh + region.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="mesh_scoping", + 
type_names=["scoping", "scopings_container"], + optional=False, + document="""Scoping or scopings container (the input type + is the output type)""", + ), + 1: PinSpecification( + name="meshed_region", + type_names=["meshed_region", "meshes_container"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="inclusive", + type_names=["int32"], + optional=True, + document="""If inclusive == 1 then all the elements + adjacent to the nodes ids in input + are added, if inclusive == 0, only + the elements which have all their + nodes in the scoping are included""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="mesh_scoping", + type_names=["scoping", "scopings_container"], + optional=False, + document="""Scoping or scopings container (the input type + is the output type)""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "transpose_scoping") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="transpose_scoping", server=server) @property def inputs(self): @@ -74,126 +139,137 @@ def inputs(self): Returns -------- - inputs : InputsTranspose + inputs : InputsTranspose """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsTranspose + outputs : OutputsTranspose """ return super().outputs -#internal name: transpose_scoping -#scripting name: transpose class InputsTranspose(_Inputs): - """Intermediate class used to connect user inputs to transpose operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.transpose() - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_meshed_region = dpf.MeshedRegion() - >>> op.inputs.meshed_region.connect(my_meshed_region) - >>> my_inclusive = int() - >>> op.inputs.inclusive.connect(my_inclusive) + """Intermediate class used to connect user inputs to + transpose operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.transpose() + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_meshed_region = dpf.MeshedRegion() + >>> op.inputs.meshed_region.connect(my_meshed_region) + >>> my_inclusive = int() + >>> op.inputs.inclusive.connect(my_inclusive) """ + def __init__(self, op: Operator): super().__init__(transpose._spec().inputs, op) - self._mesh_scoping = Input(transpose._spec().input_pin(0), 0, op, -1) + self._mesh_scoping = Input(transpose._spec().input_pin(0), 0, op, -1) self._inputs.append(self._mesh_scoping) - self._meshed_region = Input(transpose._spec().input_pin(1), 1, op, -1) + self._meshed_region = Input(transpose._spec().input_pin(1), 1, op, -1) self._inputs.append(self._meshed_region) - self._inclusive = Input(transpose._spec().input_pin(2), 2, op, -1) + self._inclusive = Input(transpose._spec().input_pin(2), 2, op, -1) self._inputs.append(self._inclusive) @property def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator + """Allows to connect mesh_scoping input to the operator. - - pindoc: Scoping or scopings container (the input type is the output type) + Scoping or scopings container (the input type + is the output type) Parameters ---------- - my_mesh_scoping : Scoping, ScopingsContainer, + my_mesh_scoping : Scoping or ScopingsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.transpose() >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> #or + >>> # or >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ return self._mesh_scoping @property def meshed_region(self): - """Allows to connect meshed_region input to the operator + """Allows to connect meshed_region input to the operator. 
Parameters ---------- - my_meshed_region : MeshedRegion, MeshesContainer, + my_meshed_region : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.transpose() >>> op.inputs.meshed_region.connect(my_meshed_region) - >>> #or + >>> # or >>> op.inputs.meshed_region(my_meshed_region) - """ return self._meshed_region @property def inclusive(self): - """Allows to connect inclusive input to the operator + """Allows to connect inclusive input to the operator. - - pindoc: if inclusive == 1 then all the elements adjacent to the nodes ids in input are added, if inclusive == 0, only the elements which have all their nodes in the scoping are included + If inclusive == 1 then all the elements + adjacent to the nodes ids in input + are added, if inclusive == 0, only + the elements which have all their + nodes in the scoping are included Parameters ---------- - my_inclusive : int, + my_inclusive : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.scoping.transpose() >>> op.inputs.inclusive.connect(my_inclusive) - >>> #or + >>> # or >>> op.inputs.inclusive(my_inclusive) - """ return self._inclusive + class OutputsTranspose(_Outputs): - """Intermediate class used to get outputs from transpose operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.scoping.transpose() - >>> # Connect inputs : op.inputs. ... - >>> result_mesh_scoping = op.outputs.mesh_scoping() + """Intermediate class used to get outputs from + transpose operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.transpose() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_mesh_scoping = op.outputs.mesh_scoping() """ + def __init__(self, op: Operator): super().__init__(transpose._spec().outputs, op) - self.mesh_scoping_as_scoping = Output( _modify_output_spec_with_one_type(transpose._spec().output_pin(0), "scoping"), 0, op) + self.mesh_scoping_as_scoping = Output( + _modify_output_spec_with_one_type( + transpose._spec().output_pin(0), "scoping" + ), + 0, + op, + ) self._outputs.append(self.mesh_scoping_as_scoping) - self.mesh_scoping_as_scopings_container = Output( _modify_output_spec_with_one_type(transpose._spec().output_pin(0), "scopings_container"), 0, op) + self.mesh_scoping_as_scopings_container = Output( + _modify_output_spec_with_one_type( + transpose._spec().output_pin(0), "scopings_container" + ), + 0, + op, + ) self._outputs.append(self.mesh_scoping_as_scopings_container) - diff --git a/ansys/dpf/core/operators/serialization/__init__.py b/ansys/dpf/core/operators/serialization/__init__.py index 2d0d98e3341..3b859e4e552 100644 --- a/ansys/dpf/core/operators/serialization/__init__.py +++ b/ansys/dpf/core/operators/serialization/__init__.py @@ -1,8 +1,10 @@ from .serializer import serializer from .mechanical_csv_to_field import mechanical_csv_to_field from .field_to_csv import field_to_csv -from .deserializer import deserializer from .csv_to_field import csv_to_field +from .deserializer import deserializer +from .serializer_to_string import serializer_to_string +from .string_deserializer import string_deserializer from .vtk_export import vtk_export from .vtk_to_fields import vtk_to_fields from .migrate_file_to_vtk import migrate_file_to_vtk diff --git a/ansys/dpf/core/operators/serialization/csv_to_field.py b/ansys/dpf/core/operators/serialization/csv_to_field.py index be16096b8e5..fc78c59114c 100644 --- a/ansys/dpf/core/operators/serialization/csv_to_field.py +++ b/ansys/dpf/core/operators/serialization/csv_to_field.py @@ -1,66 +1,104 @@ """ csv_to_field -============ +=============== +Autogenerated DPF operator 
classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "serialization" category -""" class csv_to_field(Operator): """transform csv file to a field or fields container - available inputs: - - time_scoping (Scoping) (optional) - - data_sources (DataSources) + Parameters + ---------- + time_scoping : Scoping, optional + data_sources : DataSources + Data sources containing a file with csv + extension + - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.serialization.csv_to_field() - >>> # Instantiate operator - >>> op = dpf.operators.serialization.csv_to_field() + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.serialization.csv_to_field( + ... time_scoping=my_time_scoping, + ... data_sources=my_data_sources, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.serialization.csv_to_field(time_scoping=my_time_scoping,data_sources=my_data_sources) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, time_scoping=None, data_sources=None, config=None, server=None): - super().__init__(name="csv_to_field", config = config, server = server) + super().__init__(name="csv_to_field", config=config, server=server) self._inputs = InputsCsvToField(self) self._outputs = OutputsCsvToField(self) - if time_scoping !=None: + if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) @staticmethod def _spec(): - spec = Specification(description="""transform csv file to a field or fields container""", - map_input_pin_spec={ - 0 : PinSpecification(name = "time_scoping", type_names=["scoping"], optional=True, document=""""""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""data sources containing a file with csv extension""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """transform csv file to a field or fields container""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping"], + optional=True, + document="""""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Data sources containing a file with csv + extension""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), 
+ }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "csv_to_field") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="csv_to_field", server=server) @property def inputs(self): @@ -68,117 +106,112 @@ def inputs(self): Returns -------- - inputs : InputsCsvToField + inputs : InputsCsvToField """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsCsvToField + outputs : OutputsCsvToField """ return super().outputs -#internal name: csv_to_field -#scripting name: csv_to_field class InputsCsvToField(_Inputs): - """Intermediate class used to connect user inputs to csv_to_field operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.serialization.csv_to_field() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + """Intermediate class used to connect user inputs to + csv_to_field operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.csv_to_field() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) """ + def __init__(self, op: Operator): super().__init__(csv_to_field._spec().inputs, op) - self._time_scoping = Input(csv_to_field._spec().input_pin(0), 0, op, -1) + self._time_scoping = Input(csv_to_field._spec().input_pin(0), 0, op, -1) self._inputs.append(self._time_scoping) - self._data_sources = Input(csv_to_field._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(csv_to_field._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) @property def time_scoping(self): - """Allows to connect time_scoping input to the operator + """Allows to connect time_scoping input to the operator. Parameters ---------- - my_time_scoping : Scoping, + my_time_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.csv_to_field() >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> #or + >>> # or >>> op.inputs.time_scoping(my_time_scoping) - """ return self._time_scoping @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. 
- - pindoc: data sources containing a file with csv extension + Data sources containing a file with csv + extension Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.csv_to_field() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources + class OutputsCsvToField(_Outputs): - """Intermediate class used to get outputs from csv_to_field operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.serialization.csv_to_field() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + csv_to_field operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.csv_to_field() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(csv_to_field._spec().outputs, op) - self._fields_container = Output(csv_to_field._spec().output_pin(0), 0, op) + self._fields_container = Output(csv_to_field._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.csv_to_field() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/serialization/deserializer.py b/ansys/dpf/core/operators/serialization/deserializer.py index 846d36851fa..fcf9e9a8132 100644 --- a/ansys/dpf/core/operators/serialization/deserializer.py +++ b/ansys/dpf/core/operators/serialization/deserializer.py @@ -1,63 +1,101 @@ """ deserializer -============ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "serialization" category -""" class deserializer(Operator): - """Takes a file generated by the serializer and deserializes it into DPF's entities. + """Takes a file generated by the serializer and deserializes it into + DPF's entities. + + Parameters + ---------- + file_path : str + File path - available inputs: - - file_path (str) - available outputs: - - any_output1 () - - any_output2 () + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.serialization.deserializer() - >>> # Instantiate operator - >>> op = dpf.operators.serialization.deserializer() + >>> # Make input connections + >>> my_file_path = str() + >>> op.inputs.file_path.connect(my_file_path) - >>> # Make input connections - >>> my_file_path = str() - >>> op.inputs.file_path.connect(my_file_path) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.serialization.deserializer( + ... 
file_path=my_file_path, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.serialization.deserializer(file_path=my_file_path) + >>> # Get output data + >>> result_any_output1 = op.outputs.any_output1() + >>> result_any_output2 = op.outputs.any_output2() + """ - >>> # Get output data - >>> result_any_output1 = op.outputs.any_output1() - >>> result_any_output2 = op.outputs.any_output2()""" def __init__(self, file_path=None, config=None, server=None): - super().__init__(name="deserializer", config = config, server = server) + super().__init__(name="deserializer", config=config, server=server) self._inputs = InputsDeserializer(self) self._outputs = OutputsDeserializer(self) - if file_path !=None: + if file_path is not None: self.inputs.file_path.connect(file_path) @staticmethod def _spec(): - spec = Specification(description="""Takes a file generated by the serializer and deserializes it into DPF's entities.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "file_path", type_names=["string"], optional=False, document="""file path""")}, - map_output_pin_spec={ - 1 : PinSpecification(name = "any_output", type_names=[], optional=False, document="""number and types of outputs corresponding of the inputs used in the serialization"""), - 2 : PinSpecification(name = "any_output", type_names=[], optional=False, document="""number and types of outputs corresponding of the inputs used in the serialization""")}) + description = """Takes a file generated by the serializer and deserializes it into + DPF's entities.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="file_path", + type_names=["string"], + optional=False, + document="""File path""", + ), + }, + map_output_pin_spec={ + 1: PinSpecification( + name="any_output1", + type_names=["any"], + optional=False, + document="""Number and types of outputs corresponding of + the inputs used in the serialization""", + ), + 2: 
PinSpecification( + name="any_output2", + type_names=["any"], + optional=False, + document="""Number and types of outputs corresponding of + the inputs used in the serialization""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "deserializer") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="deserializer", server=server) @property def inputs(self): @@ -65,72 +103,109 @@ def inputs(self): Returns -------- - inputs : InputsDeserializer + inputs : InputsDeserializer """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsDeserializer + outputs : OutputsDeserializer """ return super().outputs -#internal name: deserializer -#scripting name: deserializer class InputsDeserializer(_Inputs): - """Intermediate class used to connect user inputs to deserializer operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.serialization.deserializer() - >>> my_file_path = str() - >>> op.inputs.file_path.connect(my_file_path) + """Intermediate class used to connect user inputs to + deserializer operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.deserializer() + >>> my_file_path = str() + >>> op.inputs.file_path.connect(my_file_path) """ + def __init__(self, op: Operator): super().__init__(deserializer._spec().inputs, op) - self._file_path = Input(deserializer._spec().input_pin(0), 0, op, -1) + self._file_path = Input(deserializer._spec().input_pin(0), 0, op, -1) self._inputs.append(self._file_path) @property def file_path(self): - """Allows to connect file_path input to the operator + """Allows to connect file_path input to the operator. - - pindoc: file path + File path Parameters ---------- - my_file_path : str, + my_file_path : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.deserializer() >>> op.inputs.file_path.connect(my_file_path) - >>> #or + >>> # or >>> op.inputs.file_path(my_file_path) - """ return self._file_path -class OutputsDeserializer(_Outputs): - """Intermediate class used to get outputs from deserializer operator - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.deserializer() - >>> # Connect inputs : op.inputs. ... +class OutputsDeserializer(_Outputs): + """Intermediate class used to get outputs from + deserializer operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.deserializer() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_any_output1 = op.outputs.any_output1() + >>> result_any_output2 = op.outputs.any_output2() """ + def __init__(self, op: Operator): super().__init__(deserializer._spec().outputs, op) - pass + self._any_output1 = Output(deserializer._spec().output_pin(1), 1, op) + self._outputs.append(self._any_output1) + self._any_output2 = Output(deserializer._spec().output_pin(2), 2, op) + self._outputs.append(self._any_output2) + @property + def any_output1(self): + """Allows to get any_output1 output of the operator + + Returns + ---------- + my_any_output1 : Any + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.deserializer() + >>> # Connect inputs : op.inputs. ... + >>> result_any_output1 = op.outputs.any_output1() + """ # noqa: E501 + return self._any_output1 + + @property + def any_output2(self): + """Allows to get any_output2 output of the operator + + Returns + ---------- + my_any_output2 : Any + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.deserializer() + >>> # Connect inputs : op.inputs. ... + >>> result_any_output2 = op.outputs.any_output2() + """ # noqa: E501 + return self._any_output2 diff --git a/ansys/dpf/core/operators/serialization/field_to_csv.py b/ansys/dpf/core/operators/serialization/field_to_csv.py index ff77e9d4cdf..8ff33e56bf9 100644 --- a/ansys/dpf/core/operators/serialization/field_to_csv.py +++ b/ansys/dpf/core/operators/serialization/field_to_csv.py @@ -1,71 +1,125 @@ """ field_to_csv -============ +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "serialization" category -""" class field_to_csv(Operator): """Exports a field or a fields container into a csv file - available inputs: - - field_or_fields_container (FieldsContainer, Field) - - file_path (str) - - storage_type (int) (optional) - - available outputs: - - - Examples - -------- - >>> from ansys.dpf import core as dpf + Parameters + ---------- + field_or_fields_container : FieldsContainer or Field + Field_or_fields_container + file_path : str + storage_type : int, optional + Storage type : if matrices (without any + particularity) are included in the + fields container, the storage format + can be chosen. 0 : flat/line format, + 1 : ranked format. if 1 is chosen, + the csv can not be read by "csv to + field" operator anymore. default : 0. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.serialization.field_to_csv() + + >>> # Make input connections + >>> my_field_or_fields_container = dpf.FieldsContainer() + >>> op.inputs.field_or_fields_container.connect(my_field_or_fields_container) + >>> my_file_path = str() + >>> op.inputs.file_path.connect(my_file_path) + >>> my_storage_type = int() + >>> op.inputs.storage_type.connect(my_storage_type) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.serialization.field_to_csv( + ... field_or_fields_container=my_field_or_fields_container, + ... file_path=my_file_path, + ... storage_type=my_storage_type, + ... 
) - >>> # Instantiate operator - >>> op = dpf.operators.serialization.field_to_csv() - - >>> # Make input connections - >>> my_field_or_fields_container = dpf.FieldsContainer() - >>> op.inputs.field_or_fields_container.connect(my_field_or_fields_container) - >>> my_file_path = str() - >>> op.inputs.file_path.connect(my_file_path) - >>> my_storage_type = int() - >>> op.inputs.storage_type.connect(my_storage_type) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.serialization.field_to_csv(field_or_fields_container=my_field_or_fields_container,file_path=my_file_path,storage_type=my_storage_type) + """ - >>> # Get output data""" - def __init__(self, field_or_fields_container=None, file_path=None, storage_type=None, config=None, server=None): - super().__init__(name="field_to_csv", config = config, server = server) + def __init__( + self, + field_or_fields_container=None, + file_path=None, + storage_type=None, + config=None, + server=None, + ): + super().__init__(name="field_to_csv", config=config, server=server) self._inputs = InputsFieldToCsv(self) self._outputs = OutputsFieldToCsv(self) - if field_or_fields_container !=None: + if field_or_fields_container is not None: self.inputs.field_or_fields_container.connect(field_or_fields_container) - if file_path !=None: + if file_path is not None: self.inputs.file_path.connect(file_path) - if storage_type !=None: + if storage_type is not None: self.inputs.storage_type.connect(storage_type) @staticmethod def _spec(): - spec = Specification(description="""Exports a field or a fields container into a csv file""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field_or_fields_container", type_names=["fields_container","field"], optional=False, document="""field_or_fields_container"""), - 1 : PinSpecification(name = "file_path", type_names=["string"], optional=False, document=""""""), - 2 : PinSpecification(name = "storage_type", type_names=["int32"], optional=True, document="""storage 
type : if matrices (without any particularity) are included in the fields container, the storage format can be chosen. 0 : flat/line format, 1 : ranked format. If 1 is chosen, the csv can not be read by "csv to field" operator anymore. Default : 0.""")}, - map_output_pin_spec={ -}) + description = """Exports a field or a fields container into a csv file""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field_or_fields_container", + type_names=["fields_container", "field"], + optional=False, + document="""Field_or_fields_container""", + ), + 1: PinSpecification( + name="file_path", + type_names=["string"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="storage_type", + type_names=["int32"], + optional=True, + document="""Storage type : if matrices (without any + particularity) are included in the + fields container, the storage format + can be chosen. 0 : flat/line format, + 1 : ranked format. if 1 is chosen, + the csv can not be read by "csv to + field" operator anymore. default : 0.""", + ), + }, + map_output_pin_spec={}, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "field_to_csv") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="field_to_csv", server=server) @property def inputs(self): @@ -73,122 +127,123 @@ def inputs(self): Returns -------- - inputs : InputsFieldToCsv + inputs : InputsFieldToCsv """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsFieldToCsv + outputs : OutputsFieldToCsv """ return super().outputs -#internal name: field_to_csv -#scripting name: field_to_csv class InputsFieldToCsv(_Inputs): - """Intermediate class used to connect user inputs to field_to_csv operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.serialization.field_to_csv() - >>> my_field_or_fields_container = dpf.FieldsContainer() - >>> op.inputs.field_or_fields_container.connect(my_field_or_fields_container) - >>> my_file_path = str() - >>> op.inputs.file_path.connect(my_file_path) - >>> my_storage_type = int() - >>> op.inputs.storage_type.connect(my_storage_type) + """Intermediate class used to connect user inputs to + field_to_csv operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.field_to_csv() + >>> my_field_or_fields_container = dpf.FieldsContainer() + >>> op.inputs.field_or_fields_container.connect(my_field_or_fields_container) + >>> my_file_path = str() + >>> op.inputs.file_path.connect(my_file_path) + >>> my_storage_type = int() + >>> op.inputs.storage_type.connect(my_storage_type) """ + def __init__(self, op: Operator): super().__init__(field_to_csv._spec().inputs, op) - self._field_or_fields_container = Input(field_to_csv._spec().input_pin(0), 0, op, -1) + self._field_or_fields_container = Input( + field_to_csv._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._field_or_fields_container) - self._file_path = Input(field_to_csv._spec().input_pin(1), 1, op, -1) + self._file_path = Input(field_to_csv._spec().input_pin(1), 1, op, -1) self._inputs.append(self._file_path) - self._storage_type = Input(field_to_csv._spec().input_pin(2), 2, op, -1) + self._storage_type = Input(field_to_csv._spec().input_pin(2), 2, op, -1) self._inputs.append(self._storage_type) @property def field_or_fields_container(self): - """Allows to connect field_or_fields_container input to the operator + """Allows to connect field_or_fields_container input to the operator. - - pindoc: field_or_fields_container + Field_or_fields_container Parameters ---------- - my_field_or_fields_container : FieldsContainer, Field, + my_field_or_fields_container : FieldsContainer or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.field_to_csv() >>> op.inputs.field_or_fields_container.connect(my_field_or_fields_container) - >>> #or + >>> # or >>> op.inputs.field_or_fields_container(my_field_or_fields_container) - """ return self._field_or_fields_container @property def file_path(self): - """Allows to connect file_path input to the operator + """Allows to connect file_path input to the operator. 
Parameters ---------- - my_file_path : str, + my_file_path : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.field_to_csv() >>> op.inputs.file_path.connect(my_file_path) - >>> #or + >>> # or >>> op.inputs.file_path(my_file_path) - """ return self._file_path @property def storage_type(self): - """Allows to connect storage_type input to the operator + """Allows to connect storage_type input to the operator. - - pindoc: storage type : if matrices (without any particularity) are included in the fields container, the storage format can be chosen. 0 : flat/line format, 1 : ranked format. If 1 is chosen, the csv can not be read by "csv to field" operator anymore. Default : 0. + Storage type : if matrices (without any + particularity) are included in the + fields container, the storage format + can be chosen. 0 : flat/line format, + 1 : ranked format. if 1 is chosen, + the csv can not be read by "csv to + field" operator anymore. default : 0. Parameters ---------- - my_storage_type : int, + my_storage_type : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.field_to_csv() >>> op.inputs.storage_type.connect(my_storage_type) - >>> #or + >>> # or >>> op.inputs.storage_type(my_storage_type) - """ return self._storage_type -class OutputsFieldToCsv(_Outputs): - """Intermediate class used to get outputs from field_to_csv operator - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.field_to_csv() - >>> # Connect inputs : op.inputs. ... +class OutputsFieldToCsv(_Outputs): + """Intermediate class used to get outputs from + field_to_csv operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.field_to_csv() + >>> # Connect inputs : op.inputs. ... 
""" + def __init__(self, op: Operator): super().__init__(field_to_csv._spec().outputs, op) - pass - diff --git a/ansys/dpf/core/operators/serialization/mechanical_csv_to_field.py b/ansys/dpf/core/operators/serialization/mechanical_csv_to_field.py index 2b07f5dd081..9c3096176de 100644 --- a/ansys/dpf/core/operators/serialization/mechanical_csv_to_field.py +++ b/ansys/dpf/core/operators/serialization/mechanical_csv_to_field.py @@ -1,74 +1,133 @@ """ mechanical_csv_to_field -======================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "serialization" category -""" class mechanical_csv_to_field(Operator): """Reads mechanical exported csv file - available inputs: - - unit () - - mesh (MeshedRegion) (optional) - - data_sources (DataSources) - - requested_location (str, FieldDefinition) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.serialization.mechanical_csv_to_field() - - >>> # Make input connections - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.serialization.mechanical_csv_to_field(mesh=my_mesh,data_sources=my_data_sources,requested_location=my_requested_location) - - >>> # Get output data - >>> result_field = op.outputs.field()""" - def 
__init__(self, mesh=None, data_sources=None, requested_location=None, config=None, server=None): - super().__init__(name="mechanical_csv_to_field", config = config, server = server) + Parameters + ---------- + unit : Class Dataprocessing::Unit::Cunit + mesh : MeshedRegion, optional + data_sources : DataSources + requested_location : str or FieldDefinition + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.serialization.mechanical_csv_to_field() + + >>> # Make input connections + >>> my_unit = dpf.Class Dataprocessing::Unit::Cunit() + >>> op.inputs.unit.connect(my_unit) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.serialization.mechanical_csv_to_field( + ... unit=my_unit, + ... mesh=my_mesh, + ... data_sources=my_data_sources, + ... requested_location=my_requested_location, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__( + self, + unit=None, + mesh=None, + data_sources=None, + requested_location=None, + config=None, + server=None, + ): + super().__init__(name="mechanical_csv_to_field", config=config, server=server) self._inputs = InputsMechanicalCsvToField(self) self._outputs = OutputsMechanicalCsvToField(self) - if mesh !=None: + if unit is not None: + self.inputs.unit.connect(unit) + if mesh is not None: self.inputs.mesh.connect(mesh) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) - if requested_location !=None: + if requested_location is not None: self.inputs.requested_location.connect(requested_location) @staticmethod def _spec(): - spec = Specification(description="""Reads mechanical exported csv file""", - map_input_pin_spec={ - 0 : PinSpecification(name = "unit", type_names=[], optional=False, document=""""""), - 1 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document=""""""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document=""""""), - 9 : PinSpecification(name = "requested_location", type_names=["string","field_definition"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Reads mechanical exported csv file""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="unit", + type_names=["class dataProcessing::unit::CUnit"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""""", + ), + 9: PinSpecification( + name="requested_location", + 
type_names=["string", "field_definition"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "mechanical_csv_to_field") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="mechanical_csv_to_field", server=server) @property def inputs(self): @@ -76,139 +135,157 @@ def inputs(self): Returns -------- - inputs : InputsMechanicalCsvToField + inputs : InputsMechanicalCsvToField """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMechanicalCsvToField + outputs : OutputsMechanicalCsvToField """ return super().outputs -#internal name: mechanical_csv_to_field -#scripting name: mechanical_csv_to_field class InputsMechanicalCsvToField(_Inputs): - """Intermediate class used to connect user inputs to mechanical_csv_to_field operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.serialization.mechanical_csv_to_field() - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_requested_location = str() - >>> op.inputs.requested_location.connect(my_requested_location) + """Intermediate class used to connect user inputs to + mechanical_csv_to_field 
operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.mechanical_csv_to_field() + >>> my_unit = dpf.Class Dataprocessing::Unit::Cunit() + >>> op.inputs.unit.connect(my_unit) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_requested_location = str() + >>> op.inputs.requested_location.connect(my_requested_location) """ + def __init__(self, op: Operator): super().__init__(mechanical_csv_to_field._spec().inputs, op) - self._mesh = Input(mechanical_csv_to_field._spec().input_pin(1), 1, op, -1) + self._unit = Input(mechanical_csv_to_field._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._unit) + self._mesh = Input(mechanical_csv_to_field._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh) - self._data_sources = Input(mechanical_csv_to_field._spec().input_pin(4), 4, op, -1) + self._data_sources = Input( + mechanical_csv_to_field._spec().input_pin(4), 4, op, -1 + ) self._inputs.append(self._data_sources) - self._requested_location = Input(mechanical_csv_to_field._spec().input_pin(9), 9, op, -1) + self._requested_location = Input( + mechanical_csv_to_field._spec().input_pin(9), 9, op, -1 + ) self._inputs.append(self._requested_location) @property - def mesh(self): - """Allows to connect mesh input to the operator + def unit(self): + """Allows to connect unit input to the operator. Parameters ---------- - my_mesh : MeshedRegion, + my_unit : Class Dataprocessing::Unit::Cunit Examples -------- >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.mechanical_csv_to_field() + >>> op.inputs.unit.connect(my_unit) + >>> # or + >>> op.inputs.unit(my_unit) + """ + return self._unit + + @property + def mesh(self): + """Allows to connect mesh input to the operator. 
+ Parameters + ---------- + my_mesh : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf >>> op = dpf.operators.serialization.mechanical_csv_to_field() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.mechanical_csv_to_field() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources @property def requested_location(self): - """Allows to connect requested_location input to the operator + """Allows to connect requested_location input to the operator. Parameters ---------- - my_requested_location : str, FieldDefinition, + my_requested_location : str or FieldDefinition Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.mechanical_csv_to_field() >>> op.inputs.requested_location.connect(my_requested_location) - >>> #or + >>> # or >>> op.inputs.requested_location(my_requested_location) - """ return self._requested_location + class OutputsMechanicalCsvToField(_Outputs): - """Intermediate class used to get outputs from mechanical_csv_to_field operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.serialization.mechanical_csv_to_field() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + mechanical_csv_to_field operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.mechanical_csv_to_field() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(mechanical_csv_to_field._spec().outputs, op) - self._field = Output(mechanical_csv_to_field._spec().output_pin(0), 0, op) + self._field = Output(mechanical_csv_to_field._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.mechanical_csv_to_field() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/serialization/migrate_file_to_vtk.py b/ansys/dpf/core/operators/serialization/migrate_file_to_vtk.py index 62d82b1610f..c0738ee12d2 100644 --- a/ansys/dpf/core/operators/serialization/migrate_file_to_vtk.py +++ b/ansys/dpf/core/operators/serialization/migrate_file_to_vtk.py @@ -1,72 +1,122 @@ """ migrate_file_to_vtk -=================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from meshOperatorsCore plugin, from "serialization" category -""" class migrate_file_to_vtk(Operator): - """Take an input data sources or streams and convert as much data as possible to vtk. 
- - available inputs: - - output_filename (str) (optional) - - streams_container (StreamsContainer) (optional) - - data_sources (DataSources) (optional) - - available outputs: - - data_sources (DataSources) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.serialization.migrate_file_to_vtk() - - >>> # Make input connections - >>> my_output_filename = str() - >>> op.inputs.output_filename.connect(my_output_filename) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.serialization.migrate_file_to_vtk(output_filename=my_output_filename,streams_container=my_streams_container,data_sources=my_data_sources) - - >>> # Get output data - >>> result_data_sources = op.outputs.data_sources()""" - def __init__(self, output_filename=None, streams_container=None, data_sources=None, config=None, server=None): - super().__init__(name="vtk::migrate_file", config = config, server = server) + """Take an input data sources or streams and convert as much data as + possible to vtk. 
+ + Parameters + ---------- + output_filename : str, optional + streams_container : StreamsContainer, optional + data_sources : DataSources, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.serialization.migrate_file_to_vtk() + + >>> # Make input connections + >>> my_output_filename = str() + >>> op.inputs.output_filename.connect(my_output_filename) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.serialization.migrate_file_to_vtk( + ... output_filename=my_output_filename, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... ) + + >>> # Get output data + >>> result_data_sources = op.outputs.data_sources() + """ + + def __init__( + self, + output_filename=None, + streams_container=None, + data_sources=None, + config=None, + server=None, + ): + super().__init__(name="vtk::migrate_file", config=config, server=server) self._inputs = InputsMigrateFileToVtk(self) self._outputs = OutputsMigrateFileToVtk(self) - if output_filename !=None: + if output_filename is not None: self.inputs.output_filename.connect(output_filename) - if streams_container !=None: + if streams_container is not None: self.inputs.streams_container.connect(streams_container) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) @staticmethod def _spec(): - spec = Specification(description="""Take an input data sources or streams and convert as much data as possible to vtk.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "output_filename", type_names=["string"], optional=True, document=""""""), - 3 : PinSpecification(name = "streams_container", type_names=["streams_container"], 
optional=True, document=""""""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=True, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""Generated output vtk file""")}) + description = """Take an input data sources or streams and convert as much data as + possible to vtk.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="output_filename", + type_names=["string"], + optional=True, + document="""""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Generated output vtk file""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "vtk::migrate_file") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="vtk::migrate_file", server=server) @property def inputs(self): @@ -74,141 +124,135 @@ def inputs(self): Returns -------- - inputs : InputsMigrateFileToVtk + inputs : InputsMigrateFileToVtk """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsMigrateFileToVtk + outputs : OutputsMigrateFileToVtk """ return super().outputs -#internal name: vtk::migrate_file -#scripting name: migrate_file_to_vtk class InputsMigrateFileToVtk(_Inputs): - """Intermediate class used to connect user inputs to migrate_file_to_vtk operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.serialization.migrate_file_to_vtk() - >>> my_output_filename = str() - >>> op.inputs.output_filename.connect(my_output_filename) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + """Intermediate class used to connect user inputs to + migrate_file_to_vtk operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.migrate_file_to_vtk() + >>> my_output_filename = str() + >>> op.inputs.output_filename.connect(my_output_filename) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) """ + def __init__(self, op: Operator): super().__init__(migrate_file_to_vtk._spec().inputs, op) - self._output_filename = Input(migrate_file_to_vtk._spec().input_pin(0), 0, op, -1) + self._output_filename = Input( + migrate_file_to_vtk._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._output_filename) - self._streams_container = Input(migrate_file_to_vtk._spec().input_pin(3), 3, op, -1) + self._streams_container = Input( + migrate_file_to_vtk._spec().input_pin(3), 3, op, -1 + ) self._inputs.append(self._streams_container) - self._data_sources = Input(migrate_file_to_vtk._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(migrate_file_to_vtk._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) @property def output_filename(self): - """Allows to connect output_filename input to the operator + """Allows to connect output_filename input to the operator. Parameters ---------- - my_output_filename : str, + my_output_filename : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.migrate_file_to_vtk() >>> op.inputs.output_filename.connect(my_output_filename) - >>> #or + >>> # or >>> op.inputs.output_filename(my_output_filename) - """ return self._output_filename @property def streams_container(self): - """Allows to connect streams_container input to the operator + """Allows to connect streams_container input to the operator. 
Parameters ---------- - my_streams_container : StreamsContainer, + my_streams_container : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.migrate_file_to_vtk() >>> op.inputs.streams_container.connect(my_streams_container) - >>> #or + >>> # or >>> op.inputs.streams_container(my_streams_container) - """ return self._streams_container @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.migrate_file_to_vtk() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources + class OutputsMigrateFileToVtk(_Outputs): - """Intermediate class used to get outputs from migrate_file_to_vtk operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.serialization.migrate_file_to_vtk() - >>> # Connect inputs : op.inputs. ... - >>> result_data_sources = op.outputs.data_sources() + """Intermediate class used to get outputs from + migrate_file_to_vtk operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.migrate_file_to_vtk() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_data_sources = op.outputs.data_sources() """ + def __init__(self, op: Operator): super().__init__(migrate_file_to_vtk._spec().outputs, op) - self._data_sources = Output(migrate_file_to_vtk._spec().output_pin(0), 0, op) + self._data_sources = Output(migrate_file_to_vtk._spec().output_pin(0), 0, op) self._outputs.append(self._data_sources) @property def data_sources(self): """Allows to get data_sources output of the operator - - - pindoc: Generated output vtk file - Returns ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.migrate_file_to_vtk() >>> # Connect inputs : op.inputs. ... - >>> result_data_sources = op.outputs.data_sources() - """ + >>> result_data_sources = op.outputs.data_sources() + """ # noqa: E501 return self._data_sources - diff --git a/ansys/dpf/core/operators/serialization/serialize_to_hdf5.py b/ansys/dpf/core/operators/serialization/serialize_to_hdf5.py index ab01a68281b..030df721edf 100644 --- a/ansys/dpf/core/operators/serialization/serialize_to_hdf5.py +++ b/ansys/dpf/core/operators/serialization/serialize_to_hdf5.py @@ -1,83 +1,152 @@ """ serialize_to_hdf5 -================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Hdf5 plugin, from "serialization" category -""" class serialize_to_hdf5(Operator): """Serialize the inputs in an hdf5 format. 
- available inputs: - - file_path (str) - - export_floats (bool) - - export_flat_vectors (bool) - - data1 (Any) - - data2 (Any) - - available outputs: - - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.serialization.serialize_to_hdf5() - - >>> # Make input connections - >>> my_file_path = str() - >>> op.inputs.file_path.connect(my_file_path) - >>> my_export_floats = bool() - >>> op.inputs.export_floats.connect(my_export_floats) - >>> my_export_flat_vectors = bool() - >>> op.inputs.export_flat_vectors.connect(my_export_flat_vectors) - >>> my_data1 = dpf.Any() - >>> op.inputs.data1.connect(my_data1) - >>> my_data2 = dpf.Any() - >>> op.inputs.data2.connect(my_data2) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.serialization.serialize_to_hdf5(file_path=my_file_path,export_floats=my_export_floats,export_flat_vectors=my_export_flat_vectors,data1=my_data1,data2=my_data2) - - >>> # Get output data""" - def __init__(self, file_path=None, export_floats=None, export_flat_vectors=None, data1=None, data2=None, config=None, server=None): - super().__init__(name="serialize_to_hdf5", config = config, server = server) + Parameters + ---------- + file_path : str + Output file path with .h5 extension + export_floats : bool + Converts double to float to reduce file size + (default is true) + export_flat_vectors : bool + If true, vectors and matrices data are + exported flat (x1,y1,z1,x2,y2,z2..) 
+ (default is false) + data1 : + Only the data set explicitly to export is + exported + data2 : + Only the data set explicitly to export is + exported + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.serialization.serialize_to_hdf5() + + >>> # Make input connections + >>> my_file_path = str() + >>> op.inputs.file_path.connect(my_file_path) + >>> my_export_floats = bool() + >>> op.inputs.export_floats.connect(my_export_floats) + >>> my_export_flat_vectors = bool() + >>> op.inputs.export_flat_vectors.connect(my_export_flat_vectors) + >>> my_data1 = dpf.() + >>> op.inputs.data1.connect(my_data1) + >>> my_data2 = dpf.() + >>> op.inputs.data2.connect(my_data2) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.serialization.serialize_to_hdf5( + ... file_path=my_file_path, + ... export_floats=my_export_floats, + ... export_flat_vectors=my_export_flat_vectors, + ... data1=my_data1, + ... data2=my_data2, + ... 
) + + """ + + def __init__( + self, + file_path=None, + export_floats=None, + export_flat_vectors=None, + data1=None, + data2=None, + config=None, + server=None, + ): + super().__init__(name="serialize_to_hdf5", config=config, server=server) self._inputs = InputsSerializeToHdf5(self) self._outputs = OutputsSerializeToHdf5(self) - if file_path !=None: + if file_path is not None: self.inputs.file_path.connect(file_path) - if export_floats !=None: + if export_floats is not None: self.inputs.export_floats.connect(export_floats) - if export_flat_vectors !=None: + if export_flat_vectors is not None: self.inputs.export_flat_vectors.connect(export_flat_vectors) - if data1 !=None: + if data1 is not None: self.inputs.data1.connect(data1) - if data2 !=None: + if data2 is not None: self.inputs.data2.connect(data2) @staticmethod def _spec(): - spec = Specification(description="""Serialize the inputs in an hdf5 format.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "file_path", type_names=["string"], optional=False, document="""output file path with .h5 extension"""), - 1 : PinSpecification(name = "export_floats", type_names=["bool"], optional=False, document="""converts double to float to reduce file size (default is true)"""), - 2 : PinSpecification(name = "export_flat_vectors", type_names=["bool"], optional=False, document="""if true, vectors and matrices data are exported flat (x1,y1,z1,x2,y2,z2..) 
(default is false)"""), - 3 : PinSpecification(name = "data", type_names=["any"], optional=False, document="""only the data set explicitly to export is exported"""), - 4 : PinSpecification(name = "data", type_names=["any"], optional=False, document="""only the data set explicitly to export is exported""")}, - map_output_pin_spec={ -}) + description = """Serialize the inputs in an hdf5 format.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="file_path", + type_names=["string"], + optional=False, + document="""Output file path with .h5 extension""", + ), + 1: PinSpecification( + name="export_floats", + type_names=["bool"], + optional=False, + document="""Converts double to float to reduce file size + (default is true)""", + ), + 2: PinSpecification( + name="export_flat_vectors", + type_names=["bool"], + optional=False, + document="""If true, vectors and matrices data are + exported flat (x1,y1,z1,x2,y2,z2..) + (default is false)""", + ), + 3: PinSpecification( + name="data", + type_names=["any"], + optional=False, + document="""Only the data set explicitly to export is + exported""", + ), + 4: PinSpecification( + name="data", + type_names=["any"], + optional=False, + document="""Only the data set explicitly to export is + exported""", + ), + }, + map_output_pin_spec={}, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "serialize_to_hdf5") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="serialize_to_hdf5", server=server) @property def inputs(self): @@ -85,176 +154,172 @@ def inputs(self): Returns -------- - inputs : InputsSerializeToHdf5 + inputs : InputsSerializeToHdf5 """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsSerializeToHdf5 + outputs : OutputsSerializeToHdf5 """ return super().outputs -#internal name: serialize_to_hdf5 -#scripting name: serialize_to_hdf5 class InputsSerializeToHdf5(_Inputs): - """Intermediate class used to connect user inputs to serialize_to_hdf5 operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.serialization.serialize_to_hdf5() - >>> my_file_path = str() - >>> op.inputs.file_path.connect(my_file_path) - >>> my_export_floats = bool() - >>> op.inputs.export_floats.connect(my_export_floats) - >>> my_export_flat_vectors = bool() - >>> op.inputs.export_flat_vectors.connect(my_export_flat_vectors) - >>> my_data1 = dpf.Any() - >>> op.inputs.data1.connect(my_data1) - >>> my_data2 = dpf.Any() - >>> op.inputs.data2.connect(my_data2) + """Intermediate class used to connect user inputs to + serialize_to_hdf5 operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.serialize_to_hdf5() + >>> my_file_path = str() + >>> op.inputs.file_path.connect(my_file_path) + >>> my_export_floats = bool() + >>> op.inputs.export_floats.connect(my_export_floats) + >>> my_export_flat_vectors = bool() + >>> op.inputs.export_flat_vectors.connect(my_export_flat_vectors) + >>> my_data1 = dpf.() + >>> op.inputs.data1.connect(my_data1) + >>> my_data2 = dpf.() + >>> op.inputs.data2.connect(my_data2) """ + def __init__(self, op: Operator): super().__init__(serialize_to_hdf5._spec().inputs, op) - self._file_path = Input(serialize_to_hdf5._spec().input_pin(0), 0, op, -1) + self._file_path = Input(serialize_to_hdf5._spec().input_pin(0), 0, op, -1) self._inputs.append(self._file_path) - self._export_floats = Input(serialize_to_hdf5._spec().input_pin(1), 1, op, -1) + self._export_floats = Input(serialize_to_hdf5._spec().input_pin(1), 1, op, -1) self._inputs.append(self._export_floats) - self._export_flat_vectors = Input(serialize_to_hdf5._spec().input_pin(2), 2, op, -1) + self._export_flat_vectors = Input( + serialize_to_hdf5._spec().input_pin(2), 2, op, -1 + ) self._inputs.append(self._export_flat_vectors) - self._data1 = Input(serialize_to_hdf5._spec().input_pin(3), 3, op, 0) + self._data1 = Input(serialize_to_hdf5._spec().input_pin(3), 3, op, 0) self._inputs.append(self._data1) - self._data2 = Input(serialize_to_hdf5._spec().input_pin(4), 4, op, 1) + self._data2 = Input(serialize_to_hdf5._spec().input_pin(4), 4, op, 1) self._inputs.append(self._data2) @property def file_path(self): - """Allows to connect file_path input to the operator + """Allows to connect file_path input to the operator. 
- - pindoc: output file path with .h5 extension + Output file path with .h5 extension Parameters ---------- - my_file_path : str, + my_file_path : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.serialize_to_hdf5() >>> op.inputs.file_path.connect(my_file_path) - >>> #or + >>> # or >>> op.inputs.file_path(my_file_path) - """ return self._file_path @property def export_floats(self): - """Allows to connect export_floats input to the operator + """Allows to connect export_floats input to the operator. - - pindoc: converts double to float to reduce file size (default is true) + Converts double to float to reduce file size + (default is true) Parameters ---------- - my_export_floats : bool, + my_export_floats : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.serialize_to_hdf5() >>> op.inputs.export_floats.connect(my_export_floats) - >>> #or + >>> # or >>> op.inputs.export_floats(my_export_floats) - """ return self._export_floats @property def export_flat_vectors(self): - """Allows to connect export_flat_vectors input to the operator + """Allows to connect export_flat_vectors input to the operator. - - pindoc: if true, vectors and matrices data are exported flat (x1,y1,z1,x2,y2,z2..) (default is false) + If true, vectors and matrices data are + exported flat (x1,y1,z1,x2,y2,z2..) + (default is false) Parameters ---------- - my_export_flat_vectors : bool, + my_export_flat_vectors : bool Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.serialize_to_hdf5() >>> op.inputs.export_flat_vectors.connect(my_export_flat_vectors) - >>> #or + >>> # or >>> op.inputs.export_flat_vectors(my_export_flat_vectors) - """ return self._export_flat_vectors @property def data1(self): - """Allows to connect data1 input to the operator + """Allows to connect data1 input to the operator. 
- - pindoc: only the data set explicitly to export is exported + Only the data set explicitly to export is + exported Parameters ---------- - my_data1 : Any, + my_data1 : Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.serialize_to_hdf5() >>> op.inputs.data1.connect(my_data1) - >>> #or + >>> # or >>> op.inputs.data1(my_data1) - """ return self._data1 @property def data2(self): - """Allows to connect data2 input to the operator + """Allows to connect data2 input to the operator. - - pindoc: only the data set explicitly to export is exported + Only the data set explicitly to export is + exported Parameters ---------- - my_data2 : Any, + my_data2 : Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.serialize_to_hdf5() >>> op.inputs.data2.connect(my_data2) - >>> #or + >>> # or >>> op.inputs.data2(my_data2) - """ return self._data2 -class OutputsSerializeToHdf5(_Outputs): - """Intermediate class used to get outputs from serialize_to_hdf5 operator - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.serialize_to_hdf5() - >>> # Connect inputs : op.inputs. ... +class OutputsSerializeToHdf5(_Outputs): + """Intermediate class used to get outputs from + serialize_to_hdf5 operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.serialize_to_hdf5() + >>> # Connect inputs : op.inputs. ... """ + def __init__(self, op: Operator): super().__init__(serialize_to_hdf5._spec().outputs, op) - pass - diff --git a/ansys/dpf/core/operators/serialization/serializer.py b/ansys/dpf/core/operators/serialization/serializer.py index f851a812eb2..216bc3f396d 100644 --- a/ansys/dpf/core/operators/serialization/serializer.py +++ b/ansys/dpf/core/operators/serialization/serializer.py @@ -1,72 +1,117 @@ """ serializer -========== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "serialization" category -""" class serializer(Operator): """Take any input and serialize them in a file. - available inputs: - - file_path (str) - - any_input1 (Any) - - any_input2 (Any) - - available outputs: - - file_path (str) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.serialization.serializer() - - >>> # Make input connections - >>> my_file_path = str() - >>> op.inputs.file_path.connect(my_file_path) - >>> my_any_input1 = dpf.Any() - >>> op.inputs.any_input1.connect(my_any_input1) - >>> my_any_input2 = dpf.Any() - >>> op.inputs.any_input2.connect(my_any_input2) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.serialization.serializer(file_path=my_file_path,any_input1=my_any_input1,any_input2=my_any_input2) + Parameters + ---------- + file_path : str + any_input1 : Any + Any input + any_input2 : Any + Any input + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.serialization.serializer() + + >>> # Make input connections + >>> my_file_path = str() + >>> op.inputs.file_path.connect(my_file_path) + >>> my_any_input1 = dpf.Any() + >>> op.inputs.any_input1.connect(my_any_input1) + >>> my_any_input2 = dpf.Any() + >>> op.inputs.any_input2.connect(my_any_input2) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.serialization.serializer( + ... file_path=my_file_path, + ... any_input1=my_any_input1, + ... any_input2=my_any_input2, + ... 
) + + >>> # Get output data + >>> result_file_path = op.outputs.file_path() + """ - >>> # Get output data - >>> result_file_path = op.outputs.file_path()""" - def __init__(self, file_path=None, any_input1=None, any_input2=None, config=None, server=None): - super().__init__(name="serializer", config = config, server = server) + def __init__( + self, file_path=None, any_input1=None, any_input2=None, config=None, server=None + ): + super().__init__(name="serializer", config=config, server=server) self._inputs = InputsSerializer(self) self._outputs = OutputsSerializer(self) - if file_path !=None: + if file_path is not None: self.inputs.file_path.connect(file_path) - if any_input1 !=None: + if any_input1 is not None: self.inputs.any_input1.connect(any_input1) - if any_input2 !=None: + if any_input2 is not None: self.inputs.any_input2.connect(any_input2) @staticmethod def _spec(): - spec = Specification(description="""Take any input and serialize them in a file.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "file_path", type_names=["string"], optional=False, document=""""""), - 1 : PinSpecification(name = "any_input", type_names=["any"], optional=False, document="""any input"""), - 2 : PinSpecification(name = "any_input", type_names=["any"], optional=False, document="""any input""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "file_path", type_names=["string"], optional=False, document="""""")}) + description = """Take any input and serialize them in a file.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="file_path", + type_names=["string"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="any_input", + type_names=["any"], + optional=False, + document="""Any input""", + ), + 2: PinSpecification( + name="any_input", + type_names=["any"], + optional=False, + document="""Any input""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="file_path", + 
type_names=["string"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "serializer") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="serializer", server=server) @property def inputs(self): @@ -74,143 +119,135 @@ def inputs(self): Returns -------- - inputs : InputsSerializer + inputs : InputsSerializer """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsSerializer + outputs : OutputsSerializer """ return super().outputs -#internal name: serializer -#scripting name: serializer class InputsSerializer(_Inputs): - """Intermediate class used to connect user inputs to serializer operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.serialization.serializer() - >>> my_file_path = str() - >>> op.inputs.file_path.connect(my_file_path) - >>> my_any_input1 = dpf.Any() - >>> op.inputs.any_input1.connect(my_any_input1) - >>> my_any_input2 = dpf.Any() - >>> op.inputs.any_input2.connect(my_any_input2) + """Intermediate class used to connect user inputs to + serializer operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.serializer() + >>> my_file_path = str() + >>> op.inputs.file_path.connect(my_file_path) + >>> my_any_input1 = dpf.Any() + >>> op.inputs.any_input1.connect(my_any_input1) + >>> my_any_input2 = dpf.Any() + >>> op.inputs.any_input2.connect(my_any_input2) """ + def __init__(self, op: Operator): super().__init__(serializer._spec().inputs, op) - self._file_path = Input(serializer._spec().input_pin(0), 0, op, -1) + self._file_path = Input(serializer._spec().input_pin(0), 0, op, -1) self._inputs.append(self._file_path) - self._any_input1 = Input(serializer._spec().input_pin(1), 1, op, 0) + self._any_input1 = Input(serializer._spec().input_pin(1), 1, op, 0) self._inputs.append(self._any_input1) - self._any_input2 = Input(serializer._spec().input_pin(2), 2, op, 1) + self._any_input2 = Input(serializer._spec().input_pin(2), 2, op, 1) self._inputs.append(self._any_input2) @property def file_path(self): - """Allows to connect file_path input to the operator + """Allows to connect file_path input to the operator. Parameters ---------- - my_file_path : str, + my_file_path : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.serializer() >>> op.inputs.file_path.connect(my_file_path) - >>> #or + >>> # or >>> op.inputs.file_path(my_file_path) - """ return self._file_path @property def any_input1(self): - """Allows to connect any_input1 input to the operator + """Allows to connect any_input1 input to the operator. 
- - pindoc: any input + Any input Parameters ---------- - my_any_input1 : Any, + my_any_input1 : Any Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.serializer() >>> op.inputs.any_input1.connect(my_any_input1) - >>> #or + >>> # or >>> op.inputs.any_input1(my_any_input1) - """ return self._any_input1 @property def any_input2(self): - """Allows to connect any_input2 input to the operator + """Allows to connect any_input2 input to the operator. - - pindoc: any input + Any input Parameters ---------- - my_any_input2 : Any, + my_any_input2 : Any Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.serializer() >>> op.inputs.any_input2.connect(my_any_input2) - >>> #or + >>> # or >>> op.inputs.any_input2(my_any_input2) - """ return self._any_input2 + class OutputsSerializer(_Outputs): - """Intermediate class used to get outputs from serializer operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.serialization.serializer() - >>> # Connect inputs : op.inputs. ... - >>> result_file_path = op.outputs.file_path() + """Intermediate class used to get outputs from + serializer operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.serializer() + >>> # Connect inputs : op.inputs. ... + >>> result_file_path = op.outputs.file_path() """ + def __init__(self, op: Operator): super().__init__(serializer._spec().outputs, op) - self._file_path = Output(serializer._spec().output_pin(0), 0, op) + self._file_path = Output(serializer._spec().output_pin(0), 0, op) self._outputs.append(self._file_path) @property def file_path(self): """Allows to get file_path output of the operator - Returns ---------- - my_file_path : str, + my_file_path : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.serializer() >>> # Connect inputs : op.inputs. ... 
- >>> result_file_path = op.outputs.file_path() - """ + >>> result_file_path = op.outputs.file_path() + """ # noqa: E501 return self._file_path - diff --git a/ansys/dpf/core/operators/serialization/serializer_to_string.py b/ansys/dpf/core/operators/serialization/serializer_to_string.py new file mode 100644 index 00000000000..41a5e333563 --- /dev/null +++ b/ansys/dpf/core/operators/serialization/serializer_to_string.py @@ -0,0 +1,219 @@ +""" +serializer_to_string +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class serializer_to_string(Operator): + """Take any input and serialize them in a string. + + Parameters + ---------- + any_input1 : Any + Any input + any_input2 : Any + Any input + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.serialization.serializer_to_string() + + >>> # Make input connections + >>> my_any_input1 = dpf.Any() + >>> op.inputs.any_input1.connect(my_any_input1) + >>> my_any_input2 = dpf.Any() + >>> op.inputs.any_input2.connect(my_any_input2) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.serialization.serializer_to_string( + ... any_input1=my_any_input1, + ... any_input2=my_any_input2, + ... 
) + + >>> # Get output data + >>> result_serialized_string = op.outputs.serialized_string() + """ + + def __init__(self, any_input1=None, any_input2=None, config=None, server=None): + super().__init__(name="serializer_to_string", config=config, server=server) + self._inputs = InputsSerializerToString(self) + self._outputs = OutputsSerializerToString(self) + if any_input1 is not None: + self.inputs.any_input1.connect(any_input1) + if any_input2 is not None: + self.inputs.any_input2.connect(any_input2) + + @staticmethod + def _spec(): + description = """Take any input and serialize them in a string.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 1: PinSpecification( + name="any_input", + type_names=["any"], + optional=False, + document="""Any input""", + ), + 2: PinSpecification( + name="any_input", + type_names=["any"], + optional=False, + document="""Any input""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="serialized_string", + type_names=["string"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="serializer_to_string", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsSerializerToString + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsSerializerToString + """ + return super().outputs + + +class InputsSerializerToString(_Inputs): + """Intermediate class used to connect user inputs to + serializer_to_string operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.serializer_to_string() + >>> my_any_input1 = dpf.Any() + >>> op.inputs.any_input1.connect(my_any_input1) + >>> my_any_input2 = dpf.Any() + >>> op.inputs.any_input2.connect(my_any_input2) + """ + + def __init__(self, op: Operator): + super().__init__(serializer_to_string._spec().inputs, op) + self._any_input1 = Input(serializer_to_string._spec().input_pin(1), 1, op, 0) + self._inputs.append(self._any_input1) + self._any_input2 = Input(serializer_to_string._spec().input_pin(2), 2, op, 1) + self._inputs.append(self._any_input2) + + @property + def any_input1(self): + """Allows to connect any_input1 input to the operator. + + Any input + + Parameters + ---------- + my_any_input1 : Any + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.serializer_to_string() + >>> op.inputs.any_input1.connect(my_any_input1) + >>> # or + >>> op.inputs.any_input1(my_any_input1) + """ + return self._any_input1 + + @property + def any_input2(self): + """Allows to connect any_input2 input to the operator. 
+ + Any input + + Parameters + ---------- + my_any_input2 : Any + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.serializer_to_string() + >>> op.inputs.any_input2.connect(my_any_input2) + >>> # or + >>> op.inputs.any_input2(my_any_input2) + """ + return self._any_input2 + + +class OutputsSerializerToString(_Outputs): + """Intermediate class used to get outputs from + serializer_to_string operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.serializer_to_string() + >>> # Connect inputs : op.inputs. ... + >>> result_serialized_string = op.outputs.serialized_string() + """ + + def __init__(self, op: Operator): + super().__init__(serializer_to_string._spec().outputs, op) + self._serialized_string = Output( + serializer_to_string._spec().output_pin(0), 0, op + ) + self._outputs.append(self._serialized_string) + + @property + def serialized_string(self): + """Allows to get serialized_string output of the operator + + Returns + ---------- + my_serialized_string : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.serializer_to_string() + >>> # Connect inputs : op.inputs. ... + >>> result_serialized_string = op.outputs.serialized_string() + """ # noqa: E501 + return self._serialized_string diff --git a/ansys/dpf/core/operators/serialization/string_deserializer.py b/ansys/dpf/core/operators/serialization/string_deserializer.py new file mode 100644 index 00000000000..46965c5716f --- /dev/null +++ b/ansys/dpf/core/operators/serialization/string_deserializer.py @@ -0,0 +1,210 @@ +""" +string_deserializer +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class string_deserializer(Operator): + """Takes a string generated by the serializer and deserializes it into + DPF's entities. + + Parameters + ---------- + serialized_string : str + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.serialization.string_deserializer() + + >>> # Make input connections + >>> my_serialized_string = str() + >>> op.inputs.serialized_string.connect(my_serialized_string) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.serialization.string_deserializer( + ... serialized_string=my_serialized_string, + ... ) + + >>> # Get output data + >>> result_any_output1 = op.outputs.any_output1() + >>> result_any_output2 = op.outputs.any_output2() + """ + + def __init__(self, serialized_string=None, config=None, server=None): + super().__init__(name="string_deserializer", config=config, server=server) + self._inputs = InputsStringDeserializer(self) + self._outputs = OutputsStringDeserializer(self) + if serialized_string is not None: + self.inputs.serialized_string.connect(serialized_string) + + @staticmethod + def _spec(): + description = """Takes a string generated by the serializer and deserializes it into + DPF's entities.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="serialized_string", + type_names=["string"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 1: PinSpecification( + name="any_output1", + type_names=["any"], + optional=False, + document="""Number and types of outputs corresponding of + the inputs used in the serialization""", + ), + 2: PinSpecification( + 
name="any_output2", + type_names=["any"], + optional=False, + document="""Number and types of outputs corresponding of + the inputs used in the serialization""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="string_deserializer", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsStringDeserializer + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsStringDeserializer + """ + return super().outputs + + +class InputsStringDeserializer(_Inputs): + """Intermediate class used to connect user inputs to + string_deserializer operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.string_deserializer() + >>> my_serialized_string = str() + >>> op.inputs.serialized_string.connect(my_serialized_string) + """ + + def __init__(self, op: Operator): + super().__init__(string_deserializer._spec().inputs, op) + self._serialized_string = Input( + string_deserializer._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._serialized_string) + + @property + def serialized_string(self): + """Allows to connect serialized_string input to the operator. 
+ + Parameters + ---------- + my_serialized_string : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.string_deserializer() + >>> op.inputs.serialized_string.connect(my_serialized_string) + >>> # or + >>> op.inputs.serialized_string(my_serialized_string) + """ + return self._serialized_string + + +class OutputsStringDeserializer(_Outputs): + """Intermediate class used to get outputs from + string_deserializer operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.string_deserializer() + >>> # Connect inputs : op.inputs. ... + >>> result_any_output1 = op.outputs.any_output1() + >>> result_any_output2 = op.outputs.any_output2() + """ + + def __init__(self, op: Operator): + super().__init__(string_deserializer._spec().outputs, op) + self._any_output1 = Output(string_deserializer._spec().output_pin(1), 1, op) + self._outputs.append(self._any_output1) + self._any_output2 = Output(string_deserializer._spec().output_pin(2), 2, op) + self._outputs.append(self._any_output2) + + @property + def any_output1(self): + """Allows to get any_output1 output of the operator + + Returns + ---------- + my_any_output1 : Any + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.string_deserializer() + >>> # Connect inputs : op.inputs. ... + >>> result_any_output1 = op.outputs.any_output1() + """ # noqa: E501 + return self._any_output1 + + @property + def any_output2(self): + """Allows to get any_output2 output of the operator + + Returns + ---------- + my_any_output2 : Any + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.string_deserializer() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_any_output2 = op.outputs.any_output2() + """ # noqa: E501 + return self._any_output2 diff --git a/ansys/dpf/core/operators/serialization/vtk_export.py b/ansys/dpf/core/operators/serialization/vtk_export.py index 2cd4aa29ff9..c93f453c6e4 100644 --- a/ansys/dpf/core/operators/serialization/vtk_export.py +++ b/ansys/dpf/core/operators/serialization/vtk_export.py @@ -1,77 +1,136 @@ """ vtk_export -========== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from meshOperatorsCore plugin, from "serialization" category -""" class vtk_export(Operator): """Write the input field and fields container into a given vtk path - available inputs: - - file_path (str) - - mesh (MeshedRegion) (optional) - - fields1 (FieldsContainer, Field) - - fields2 (FieldsContainer, Field) - - available outputs: - - - Examples - -------- - >>> from ansys.dpf import core as dpf + Parameters + ---------- + file_path : str + Path with vtk extension were the export + occurs + mesh : MeshedRegion, optional + Necessary if the first field or fields + container don't have a mesh in their + support + fields1 : FieldsContainer or Field + Fields exported + fields2 : FieldsContainer or Field + Fields exported + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.serialization.vtk_export() + + >>> # Make input connections + >>> my_file_path = str() + >>> op.inputs.file_path.connect(my_file_path) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_fields1 = dpf.FieldsContainer() + >>> op.inputs.fields1.connect(my_fields1) + >>> my_fields2 = 
dpf.FieldsContainer() + >>> op.inputs.fields2.connect(my_fields2) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.serialization.vtk_export( + ... file_path=my_file_path, + ... mesh=my_mesh, + ... fields1=my_fields1, + ... fields2=my_fields2, + ... ) - >>> # Instantiate operator - >>> op = dpf.operators.serialization.vtk_export() - - >>> # Make input connections - >>> my_file_path = str() - >>> op.inputs.file_path.connect(my_file_path) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_fields1 = dpf.FieldsContainer() - >>> op.inputs.fields1.connect(my_fields1) - >>> my_fields2 = dpf.FieldsContainer() - >>> op.inputs.fields2.connect(my_fields2) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.serialization.vtk_export(file_path=my_file_path,mesh=my_mesh,fields1=my_fields1,fields2=my_fields2) + """ - >>> # Get output data""" - def __init__(self, file_path=None, mesh=None, fields1=None, fields2=None, config=None, server=None): - super().__init__(name="vtk_export", config = config, server = server) + def __init__( + self, + file_path=None, + mesh=None, + fields1=None, + fields2=None, + config=None, + server=None, + ): + super().__init__(name="vtk_export", config=config, server=server) self._inputs = InputsVtkExport(self) self._outputs = OutputsVtkExport(self) - if file_path !=None: + if file_path is not None: self.inputs.file_path.connect(file_path) - if mesh !=None: + if mesh is not None: self.inputs.mesh.connect(mesh) - if fields1 !=None: + if fields1 is not None: self.inputs.fields1.connect(fields1) - if fields2 !=None: + if fields2 is not None: self.inputs.fields2.connect(fields2) @staticmethod def _spec(): - spec = Specification(description="""Write the input field and fields container into a given vtk path""", - map_input_pin_spec={ - 0 : PinSpecification(name = "file_path", type_names=["string"], optional=False, document="""path with vtk extension were the 
export occurs"""), - 1 : PinSpecification(name = "mesh", type_names=["abstract_meshed_region"], optional=True, document="""necessary if the first field or fields container don't have a mesh in their support"""), - 2 : PinSpecification(name = "fields", type_names=["fields_container","field"], optional=False, document="""fields exported"""), - 3 : PinSpecification(name = "fields", type_names=["fields_container","field"], optional=False, document="""fields exported""")}, - map_output_pin_spec={ -}) + description = ( + """Write the input field and fields container into a given vtk path""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="file_path", + type_names=["string"], + optional=False, + document="""Path with vtk extension were the export + occurs""", + ), + 1: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], + optional=True, + document="""Necessary if the first field or fields + container don't have a mesh in their + support""", + ), + 2: PinSpecification( + name="fields", + type_names=["fields_container", "field"], + optional=False, + document="""Fields exported""", + ), + 3: PinSpecification( + name="fields", + type_names=["fields_container", "field"], + optional=False, + document="""Fields exported""", + ), + }, + map_output_pin_spec={}, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "vtk_export") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="vtk_export", server=server) @property def inputs(self): @@ -79,150 +138,144 @@ def inputs(self): Returns -------- - inputs : InputsVtkExport + inputs : InputsVtkExport """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsVtkExport + outputs : OutputsVtkExport """ return super().outputs -#internal name: vtk_export -#scripting name: vtk_export class InputsVtkExport(_Inputs): - """Intermediate class used to connect user inputs to vtk_export operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.serialization.vtk_export() - >>> my_file_path = str() - >>> op.inputs.file_path.connect(my_file_path) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_fields1 = dpf.FieldsContainer() - >>> op.inputs.fields1.connect(my_fields1) - >>> my_fields2 = dpf.FieldsContainer() - >>> op.inputs.fields2.connect(my_fields2) + """Intermediate class used to connect user inputs to + vtk_export operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.vtk_export() + >>> my_file_path = str() + >>> op.inputs.file_path.connect(my_file_path) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_fields1 = dpf.FieldsContainer() + >>> op.inputs.fields1.connect(my_fields1) + >>> my_fields2 = dpf.FieldsContainer() + >>> op.inputs.fields2.connect(my_fields2) """ + def __init__(self, op: Operator): super().__init__(vtk_export._spec().inputs, op) - self._file_path = Input(vtk_export._spec().input_pin(0), 0, op, -1) + self._file_path = Input(vtk_export._spec().input_pin(0), 0, op, -1) self._inputs.append(self._file_path) - self._mesh = Input(vtk_export._spec().input_pin(1), 1, op, -1) + self._mesh = Input(vtk_export._spec().input_pin(1), 1, op, -1) self._inputs.append(self._mesh) - self._fields1 = Input(vtk_export._spec().input_pin(2), 2, op, 0) + self._fields1 = Input(vtk_export._spec().input_pin(2), 2, op, 0) self._inputs.append(self._fields1) - self._fields2 = Input(vtk_export._spec().input_pin(3), 3, op, 1) + self._fields2 = Input(vtk_export._spec().input_pin(3), 3, op, 1) self._inputs.append(self._fields2) @property def file_path(self): - """Allows to connect file_path input to the operator + """Allows to connect file_path input to the operator. - - pindoc: path with vtk extension were the export occurs + Path with vtk extension were the export + occurs Parameters ---------- - my_file_path : str, + my_file_path : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.vtk_export() >>> op.inputs.file_path.connect(my_file_path) - >>> #or + >>> # or >>> op.inputs.file_path(my_file_path) - """ return self._file_path @property def mesh(self): - """Allows to connect mesh input to the operator + """Allows to connect mesh input to the operator. 
- - pindoc: necessary if the first field or fields container don't have a mesh in their support + Necessary if the first field or fields + container don't have a mesh in their + support Parameters ---------- - my_mesh : MeshedRegion, + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.vtk_export() >>> op.inputs.mesh.connect(my_mesh) - >>> #or + >>> # or >>> op.inputs.mesh(my_mesh) - """ return self._mesh @property def fields1(self): - """Allows to connect fields1 input to the operator + """Allows to connect fields1 input to the operator. - - pindoc: fields exported + Fields exported Parameters ---------- - my_fields1 : FieldsContainer, Field, + my_fields1 : FieldsContainer or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.vtk_export() >>> op.inputs.fields1.connect(my_fields1) - >>> #or + >>> # or >>> op.inputs.fields1(my_fields1) - """ return self._fields1 @property def fields2(self): - """Allows to connect fields2 input to the operator + """Allows to connect fields2 input to the operator. - - pindoc: fields exported + Fields exported Parameters ---------- - my_fields2 : FieldsContainer, Field, + my_fields2 : FieldsContainer or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.vtk_export() >>> op.inputs.fields2.connect(my_fields2) - >>> #or + >>> # or >>> op.inputs.fields2(my_fields2) - """ return self._fields2 -class OutputsVtkExport(_Outputs): - """Intermediate class used to get outputs from vtk_export operator - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.vtk_export() - >>> # Connect inputs : op.inputs. ... +class OutputsVtkExport(_Outputs): + """Intermediate class used to get outputs from + vtk_export operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.vtk_export() + >>> # Connect inputs : op.inputs. ... """ + def __init__(self, op: Operator): super().__init__(vtk_export._spec().outputs, op) - pass - diff --git a/ansys/dpf/core/operators/serialization/vtk_to_fields.py b/ansys/dpf/core/operators/serialization/vtk_to_fields.py index 0eccf9a889b..63caebb44e4 100644 --- a/ansys/dpf/core/operators/serialization/vtk_to_fields.py +++ b/ansys/dpf/core/operators/serialization/vtk_to_fields.py @@ -1,72 +1,116 @@ """ vtk_to_fields -============= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from meshOperatorsCore plugin, from "serialization" category -""" class vtk_to_fields(Operator): """Write a field based on a vtk file. 
- available inputs: - - field_name (str) (optional) - - streams (StreamsContainer) (optional) - - data_sources (DataSources) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.serialization.vtk_to_fields() - - >>> # Make input connections - >>> my_field_name = str() - >>> op.inputs.field_name.connect(my_field_name) - >>> my_streams = dpf.StreamsContainer() - >>> op.inputs.streams.connect(my_streams) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.serialization.vtk_to_fields(field_name=my_field_name,streams=my_streams,data_sources=my_data_sources) + Parameters + ---------- + field_name : str, optional + Name of the field in the vtk file + streams : StreamsContainer, optional + data_sources : DataSources + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.serialization.vtk_to_fields() + + >>> # Make input connections + >>> my_field_name = str() + >>> op.inputs.field_name.connect(my_field_name) + >>> my_streams = dpf.StreamsContainer() + >>> op.inputs.streams.connect(my_streams) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.serialization.vtk_to_fields( + ... field_name=my_field_name, + ... streams=my_streams, + ... data_sources=my_data_sources, + ... 
) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, field_name=None, streams=None, data_sources=None, config=None, server=None): - super().__init__(name="vtk::vtk::FieldProvider", config = config, server = server) + def __init__( + self, field_name=None, streams=None, data_sources=None, config=None, server=None + ): + super().__init__(name="vtk::vtk::FieldProvider", config=config, server=server) self._inputs = InputsVtkToFields(self) self._outputs = OutputsVtkToFields(self) - if field_name !=None: + if field_name is not None: self.inputs.field_name.connect(field_name) - if streams !=None: + if streams is not None: self.inputs.streams.connect(streams) - if data_sources !=None: + if data_sources is not None: self.inputs.data_sources.connect(data_sources) @staticmethod def _spec(): - spec = Specification(description="""Write a field based on a vtk file.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field_name", type_names=["string"], optional=True, document="""name of the field in the vtk file"""), - 3 : PinSpecification(name = "streams", type_names=["streams_container"], optional=True, document=""""""), - 4 : PinSpecification(name = "data_sources", type_names=["data_sources"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""fields_container""")}) + description = """Write a field based on a vtk file.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field_name", + type_names=["string"], + optional=True, + document="""Name of the field in the vtk file""", + ), + 3: PinSpecification( + name="streams", + type_names=["streams_container"], + optional=True, + document="""""", + ), + 4: PinSpecification( + name="data_sources", + 
type_names=["data_sources"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""Fields_container""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "vtk::vtk::FieldProvider") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="vtk::vtk::FieldProvider", server=server) @property def inputs(self): @@ -74,143 +118,133 @@ def inputs(self): Returns -------- - inputs : InputsVtkToFields + inputs : InputsVtkToFields """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsVtkToFields + outputs : OutputsVtkToFields """ return super().outputs -#internal name: vtk::vtk::FieldProvider -#scripting name: vtk_to_fields class InputsVtkToFields(_Inputs): - """Intermediate class used to connect user inputs to vtk_to_fields operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.serialization.vtk_to_fields() - >>> my_field_name = str() - >>> op.inputs.field_name.connect(my_field_name) - >>> my_streams = dpf.StreamsContainer() - >>> op.inputs.streams.connect(my_streams) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) + """Intermediate class used to connect user inputs to + vtk_to_fields operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.vtk_to_fields() + >>> my_field_name = str() + >>> op.inputs.field_name.connect(my_field_name) + >>> my_streams = dpf.StreamsContainer() + >>> op.inputs.streams.connect(my_streams) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) """ + def __init__(self, op: Operator): super().__init__(vtk_to_fields._spec().inputs, op) - self._field_name = Input(vtk_to_fields._spec().input_pin(0), 0, op, -1) + self._field_name = Input(vtk_to_fields._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field_name) - self._streams = Input(vtk_to_fields._spec().input_pin(3), 3, op, -1) + self._streams = Input(vtk_to_fields._spec().input_pin(3), 3, op, -1) self._inputs.append(self._streams) - self._data_sources = Input(vtk_to_fields._spec().input_pin(4), 4, op, -1) + self._data_sources = Input(vtk_to_fields._spec().input_pin(4), 4, op, -1) self._inputs.append(self._data_sources) @property def field_name(self): - """Allows to connect field_name input to the operator + """Allows to connect field_name input to the operator. - - pindoc: name of the field in the vtk file + Name of the field in the vtk file Parameters ---------- - my_field_name : str, + my_field_name : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.vtk_to_fields() >>> op.inputs.field_name.connect(my_field_name) - >>> #or + >>> # or >>> op.inputs.field_name(my_field_name) - """ return self._field_name @property def streams(self): - """Allows to connect streams input to the operator + """Allows to connect streams input to the operator. 
Parameters ---------- - my_streams : StreamsContainer, + my_streams : StreamsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.vtk_to_fields() >>> op.inputs.streams.connect(my_streams) - >>> #or + >>> # or >>> op.inputs.streams(my_streams) - """ return self._streams @property def data_sources(self): - """Allows to connect data_sources input to the operator + """Allows to connect data_sources input to the operator. Parameters ---------- - my_data_sources : DataSources, + my_data_sources : DataSources Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.vtk_to_fields() >>> op.inputs.data_sources.connect(my_data_sources) - >>> #or + >>> # or >>> op.inputs.data_sources(my_data_sources) - """ return self._data_sources + class OutputsVtkToFields(_Outputs): - """Intermediate class used to get outputs from vtk_to_fields operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.serialization.vtk_to_fields() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + vtk_to_fields operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.vtk_to_fields() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(vtk_to_fields._spec().outputs, op) - self._fields_container = Output(vtk_to_fields._spec().output_pin(0), 0, op) + self._fields_container = Output(vtk_to_fields._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - - - pindoc: fields_container - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.serialization.vtk_to_fields() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/server/grpc_shutdown_server.py b/ansys/dpf/core/operators/server/grpc_shutdown_server.py new file mode 100644 index 00000000000..3b509237ac6 --- /dev/null +++ b/ansys/dpf/core/operators/server/grpc_shutdown_server.py @@ -0,0 +1,151 @@ +""" +grpc_shutdown_server +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class grpc_shutdown_server(Operator): + """Shutdowns dpf's grpc server + + Parameters + ---------- + grpc_stream : StreamsContainer + Dpf streams handling the server + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.server.grpc_shutdown_server() + + >>> # Make input connections + >>> my_grpc_stream = dpf.StreamsContainer() + >>> op.inputs.grpc_stream.connect(my_grpc_stream) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.server.grpc_shutdown_server( + ... grpc_stream=my_grpc_stream, + ... ) + + """ + + def __init__(self, grpc_stream=None, config=None, server=None): + super().__init__(name="grpc_server_shutdown", config=config, server=server) + self._inputs = InputsGrpcShutdownServer(self) + self._outputs = OutputsGrpcShutdownServer(self) + if grpc_stream is not None: + self.inputs.grpc_stream.connect(grpc_stream) + + @staticmethod + def _spec(): + description = """Shutdowns dpf's grpc server""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="grpc_stream", + type_names=["streams_container"], + optional=False, + document="""Dpf streams handling the server""", + ), + }, + map_output_pin_spec={}, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. 
When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="grpc_server_shutdown", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsGrpcShutdownServer + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsGrpcShutdownServer + """ + return super().outputs + + +class InputsGrpcShutdownServer(_Inputs): + """Intermediate class used to connect user inputs to + grpc_shutdown_server operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.server.grpc_shutdown_server() + >>> my_grpc_stream = dpf.StreamsContainer() + >>> op.inputs.grpc_stream.connect(my_grpc_stream) + """ + + def __init__(self, op: Operator): + super().__init__(grpc_shutdown_server._spec().inputs, op) + self._grpc_stream = Input(grpc_shutdown_server._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._grpc_stream) + + @property + def grpc_stream(self): + """Allows to connect grpc_stream input to the operator. + + Dpf streams handling the server + + Parameters + ---------- + my_grpc_stream : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.server.grpc_shutdown_server() + >>> op.inputs.grpc_stream.connect(my_grpc_stream) + >>> # or + >>> op.inputs.grpc_stream(my_grpc_stream) + """ + return self._grpc_stream + + +class OutputsGrpcShutdownServer(_Outputs): + """Intermediate class used to get outputs from + grpc_shutdown_server operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.server.grpc_shutdown_server() + >>> # Connect inputs : op.inputs. ... 
+ """ + + def __init__(self, op: Operator): + super().__init__(grpc_shutdown_server._spec().outputs, op) diff --git a/ansys/dpf/core/operators/server/grpc_start_server.py b/ansys/dpf/core/operators/server/grpc_start_server.py new file mode 100644 index 00000000000..0a694d51e97 --- /dev/null +++ b/ansys/dpf/core/operators/server/grpc_start_server.py @@ -0,0 +1,367 @@ +""" +grpc_start_server +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class grpc_start_server(Operator): + """Starts a dpf's grpc server (if local) or connect to one and keep it + waiting for requests in a streams. + + Parameters + ---------- + ip : str, optional + If no ip address is put the local ip address + is taken + port : str or int, optional + If no port is put port 50052 is taken + start_in_new_thread : bool, optional + Default is true. with false, this thread will + be waiting for grpc calls and will + not be usable for anything else + should_start_server : bool, optional + If true, the server is assumed to be local + and is started. if false, only a + client (able to send grpc calls) will + be started + data_sources : DataSources, optional + A data sources with result key 'grpc' and + file path port:ip can be used instead + of the input port and ip. 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.server.grpc_start_server() + + >>> # Make input connections + >>> my_ip = str() + >>> op.inputs.ip.connect(my_ip) + >>> my_port = str() + >>> op.inputs.port.connect(my_port) + >>> my_start_in_new_thread = bool() + >>> op.inputs.start_in_new_thread.connect(my_start_in_new_thread) + >>> my_should_start_server = bool() + >>> op.inputs.should_start_server.connect(my_should_start_server) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.server.grpc_start_server( + ... ip=my_ip, + ... port=my_port, + ... start_in_new_thread=my_start_in_new_thread, + ... should_start_server=my_should_start_server, + ... data_sources=my_data_sources, + ... ) + + >>> # Get output data + >>> result_grpc_streams = op.outputs.grpc_streams() + """ + + def __init__( + self, + ip=None, + port=None, + start_in_new_thread=None, + should_start_server=None, + data_sources=None, + config=None, + server=None, + ): + super().__init__(name="grpc::stream_provider", config=config, server=server) + self._inputs = InputsGrpcStartServer(self) + self._outputs = OutputsGrpcStartServer(self) + if ip is not None: + self.inputs.ip.connect(ip) + if port is not None: + self.inputs.port.connect(port) + if start_in_new_thread is not None: + self.inputs.start_in_new_thread.connect(start_in_new_thread) + if should_start_server is not None: + self.inputs.should_start_server.connect(should_start_server) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + + @staticmethod + def _spec(): + description = """Starts a dpf's grpc server (if local) or connect to one and keep it + waiting for requests in a streams.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="ip", + type_names=["string"], 
+ optional=True, + document="""If no ip address is put the local ip address + is taken""", + ), + 1: PinSpecification( + name="port", + type_names=["string", "int32"], + optional=True, + document="""If no port is put port 50052 is taken""", + ), + 2: PinSpecification( + name="start_in_new_thread", + type_names=["bool"], + optional=True, + document="""Default is true. with false, this thread will + be waiting for grpc calls and will + not be usable for anything else""", + ), + 3: PinSpecification( + name="should_start_server", + type_names=["bool"], + optional=True, + document="""If true, the server is assumed to be local + and is started. if false, only a + client (able to send grpc calls) will + be started""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=True, + document="""A data sources with result key 'grpc' and + file path port:ip can be used instead + of the input port and ip.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="grpc_streams", + type_names=["streams_container"], + optional=False, + document="""Dpf streams handling the server""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="grpc::stream_provider", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsGrpcStartServer + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsGrpcStartServer + """ + return super().outputs + + +class InputsGrpcStartServer(_Inputs): + """Intermediate class used to connect user inputs to + grpc_start_server operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.server.grpc_start_server() + >>> my_ip = str() + >>> op.inputs.ip.connect(my_ip) + >>> my_port = str() + >>> op.inputs.port.connect(my_port) + >>> my_start_in_new_thread = bool() + >>> op.inputs.start_in_new_thread.connect(my_start_in_new_thread) + >>> my_should_start_server = bool() + >>> op.inputs.should_start_server.connect(my_should_start_server) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + """ + + def __init__(self, op: Operator): + super().__init__(grpc_start_server._spec().inputs, op) + self._ip = Input(grpc_start_server._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._ip) + self._port = Input(grpc_start_server._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._port) + self._start_in_new_thread = Input( + grpc_start_server._spec().input_pin(2), 2, op, -1 + ) + self._inputs.append(self._start_in_new_thread) + self._should_start_server = Input( + grpc_start_server._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._should_start_server) + self._data_sources = Input(grpc_start_server._spec().input_pin(4), 4, op, -1) + self._inputs.append(self._data_sources) + + @property + def ip(self): + """Allows to connect ip input to the operator. 
+ + If no ip address is put the local ip address + is taken + + Parameters + ---------- + my_ip : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.server.grpc_start_server() + >>> op.inputs.ip.connect(my_ip) + >>> # or + >>> op.inputs.ip(my_ip) + """ + return self._ip + + @property + def port(self): + """Allows to connect port input to the operator. + + If no port is put port 50052 is taken + + Parameters + ---------- + my_port : str or int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.server.grpc_start_server() + >>> op.inputs.port.connect(my_port) + >>> # or + >>> op.inputs.port(my_port) + """ + return self._port + + @property + def start_in_new_thread(self): + """Allows to connect start_in_new_thread input to the operator. + + Default is true. with false, this thread will + be waiting for grpc calls and will + not be usable for anything else + + Parameters + ---------- + my_start_in_new_thread : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.server.grpc_start_server() + >>> op.inputs.start_in_new_thread.connect(my_start_in_new_thread) + >>> # or + >>> op.inputs.start_in_new_thread(my_start_in_new_thread) + """ + return self._start_in_new_thread + + @property + def should_start_server(self): + """Allows to connect should_start_server input to the operator. + + If true, the server is assumed to be local + and is started. 
if false, only a + client (able to send grpc calls) will + be started + + Parameters + ---------- + my_should_start_server : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.server.grpc_start_server() + >>> op.inputs.should_start_server.connect(my_should_start_server) + >>> # or + >>> op.inputs.should_start_server(my_should_start_server) + """ + return self._should_start_server + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + A data sources with result key 'grpc' and + file path port:ip can be used instead + of the input port and ip. + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.server.grpc_start_server() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + +class OutputsGrpcStartServer(_Outputs): + """Intermediate class used to get outputs from + grpc_start_server operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.server.grpc_start_server() + >>> # Connect inputs : op.inputs. ... + >>> result_grpc_streams = op.outputs.grpc_streams() + """ + + def __init__(self, op: Operator): + super().__init__(grpc_start_server._spec().outputs, op) + self._grpc_streams = Output(grpc_start_server._spec().output_pin(0), 0, op) + self._outputs.append(self._grpc_streams) + + @property + def grpc_streams(self): + """Allows to get grpc_streams output of the operator + + Returns + ---------- + my_grpc_streams : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.server.grpc_start_server() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_grpc_streams = op.outputs.grpc_streams() + """ # noqa: E501 + return self._grpc_streams diff --git a/ansys/dpf/core/operators/utility/__init__.py b/ansys/dpf/core/operators/utility/__init__.py index adbfd6bed95..7fa9fdfc5ad 100644 --- a/ansys/dpf/core/operators/utility/__init__.py +++ b/ansys/dpf/core/operators/utility/__init__.py @@ -1,3 +1,4 @@ +from .merge_result_infos import merge_result_infos from .field_to_fc import field_to_fc from .html_doc import html_doc from .unitary_field import unitary_field @@ -13,5 +14,21 @@ from .forward import forward from .txt_file_to_dpf import txt_file_to_dpf from .bind_support_fc import bind_support_fc +from .default_value import default_value +from .extract_time_freq import extract_time_freq from .python_generator import python_generator +from .make_overall import make_overall +from .merge_fields_containers import merge_fields_containers +from .merge_scopings import merge_scopings +from .merge_materials import merge_materials +from .merge_property_fields import merge_property_fields +from .remote_workflow_instantiate import remote_workflow_instantiate +from .remote_operator_instantiate import remote_operator_instantiate +from .merge_fields_by_label import merge_fields_by_label +from .merge_scopings_containers import merge_scopings_containers +from .merge_meshes import merge_meshes +from .merge_time_freq_supports import merge_time_freq_supports +from .merge_fields import merge_fields +from .merge_supports import merge_supports +from .merge_meshes_containers import merge_meshes_containers from .change_shell_layers import change_shell_layers diff --git a/ansys/dpf/core/operators/utility/bind_support.py b/ansys/dpf/core/operators/utility/bind_support.py index af959d24414..96c4d47741f 100644 --- a/ansys/dpf/core/operators/utility/bind_support.py +++ b/ansys/dpf/core/operators/utility/bind_support.py @@ -1,66 +1,105 @@ """ bind_support -============ +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "utility" category -""" class bind_support(Operator): """Tie a support to a field. - available inputs: - - field (Field, FieldsContainer) - - support (MeshedRegion, AbstractFieldSupport) + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + support : MeshedRegion or AbstractFieldSupport + Meshed region or a support of the field + - available outputs: - - field (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.utility.bind_support() - >>> # Instantiate operator - >>> op = dpf.operators.utility.bind_support() + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_support = dpf.MeshedRegion() + >>> op.inputs.support.connect(my_support) - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_support = dpf.MeshedRegion() - >>> op.inputs.support.connect(my_support) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.bind_support( + ... field=my_field, + ... support=my_support, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.utility.bind_support(field=my_field,support=my_support) + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, field=None, support=None, config=None, server=None): - super().__init__(name="BindSupport", config = config, server = server) + super().__init__(name="BindSupport", config=config, server=server) self._inputs = InputsBindSupport(self) self._outputs = OutputsBindSupport(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if support !=None: + if support is not None: self.inputs.support.connect(support) @staticmethod def _spec(): - spec = Specification(description="""Tie a support to a field.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected"""), - 1 : PinSpecification(name = "support", type_names=["abstract_meshed_region","abstract_field_support"], optional=False, document="""meshed region or a support of the field""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Tie a support to a field.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="support", + type_names=["abstract_meshed_region", "abstract_field_support"], + optional=False, + document="""Meshed region or a support of the field""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def 
default_config(): - return Operator.default_config(name = "BindSupport") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="BindSupport", server=server) @property def inputs(self): @@ -68,119 +107,114 @@ def inputs(self): Returns -------- - inputs : InputsBindSupport + inputs : InputsBindSupport """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsBindSupport + outputs : OutputsBindSupport """ return super().outputs -#internal name: BindSupport -#scripting name: bind_support class InputsBindSupport(_Inputs): - """Intermediate class used to connect user inputs to bind_support operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.bind_support() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_support = dpf.MeshedRegion() - >>> op.inputs.support.connect(my_support) + """Intermediate class used to connect user inputs to + bind_support operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.bind_support() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_support = dpf.MeshedRegion() + >>> op.inputs.support.connect(my_support) """ + def __init__(self, op: Operator): super().__init__(bind_support._spec().inputs, op) - self._field = Input(bind_support._spec().input_pin(0), 0, op, -1) + self._field = Input(bind_support._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._support = Input(bind_support._spec().input_pin(1), 1, op, -1) + self._support = Input(bind_support._spec().input_pin(1), 1, op, -1) self._inputs.append(self._support) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.bind_support() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def support(self): - """Allows to connect support input to the operator + """Allows to connect support input to the operator. 
- - pindoc: meshed region or a support of the field + Meshed region or a support of the field Parameters ---------- - my_support : MeshedRegion, AbstractFieldSupport, + my_support : MeshedRegion or AbstractFieldSupport Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.bind_support() >>> op.inputs.support.connect(my_support) - >>> #or + >>> # or >>> op.inputs.support(my_support) - """ return self._support + class OutputsBindSupport(_Outputs): - """Intermediate class used to get outputs from bind_support operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.bind_support() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + bind_support operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.bind_support() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(bind_support._spec().outputs, op) - self._field = Output(bind_support._spec().output_pin(0), 0, op) + self._field = Output(bind_support._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.bind_support() >>> # Connect inputs : op.inputs. ... 
- >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/utility/bind_support_fc.py b/ansys/dpf/core/operators/utility/bind_support_fc.py index 7c65afdacf7..31de72d0734 100644 --- a/ansys/dpf/core/operators/utility/bind_support_fc.py +++ b/ansys/dpf/core/operators/utility/bind_support_fc.py @@ -1,66 +1,102 @@ """ bind_support_fc =============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "utility" category -""" class bind_support_fc(Operator): """Tie a support to a fields container. - available inputs: - - fields_container (FieldsContainer) - - support (MeshedRegion, AbstractFieldSupport) + Parameters + ---------- + fields_container : FieldsContainer + support : MeshedRegion or AbstractFieldSupport + Meshed region or a support of the field + - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.utility.bind_support_fc() - >>> # Instantiate operator - >>> op = dpf.operators.utility.bind_support_fc() + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_support = dpf.MeshedRegion() + >>> op.inputs.support.connect(my_support) - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_support = 
dpf.MeshedRegion() - >>> op.inputs.support.connect(my_support) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.bind_support_fc( + ... fields_container=my_fields_container, + ... support=my_support, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.utility.bind_support_fc(fields_container=my_fields_container,support=my_support) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields_container=None, support=None, config=None, server=None): - super().__init__(name="BindSupportFC", config = config, server = server) + super().__init__(name="BindSupportFC", config=config, server=server) self._inputs = InputsBindSupportFc(self) self._outputs = OutputsBindSupportFc(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if support !=None: + if support is not None: self.inputs.support.connect(support) @staticmethod def _spec(): - spec = Specification(description="""Tie a support to a fields container.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "support", type_names=["abstract_meshed_region","abstract_field_support"], optional=False, document="""meshed region or a support of the field""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Tie a support to a fields container.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="support", + 
type_names=["abstract_meshed_region", "abstract_field_support"], + optional=False, + document="""Meshed region or a support of the field""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "BindSupportFC") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="BindSupportFC", server=server) @property def inputs(self): @@ -68,117 +104,111 @@ def inputs(self): Returns -------- - inputs : InputsBindSupportFc + inputs : InputsBindSupportFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsBindSupportFc + outputs : OutputsBindSupportFc """ return super().outputs -#internal name: BindSupportFC -#scripting name: bind_support_fc class InputsBindSupportFc(_Inputs): - """Intermediate class used to connect user inputs to bind_support_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.bind_support_fc() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_support = dpf.MeshedRegion() - >>> op.inputs.support.connect(my_support) + """Intermediate class used to connect user inputs to + bind_support_fc operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.bind_support_fc() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_support = dpf.MeshedRegion() + >>> op.inputs.support.connect(my_support) """ + def __init__(self, op: Operator): super().__init__(bind_support_fc._spec().inputs, op) - self._fields_container = Input(bind_support_fc._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(bind_support_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) - self._support = Input(bind_support_fc._spec().input_pin(1), 1, op, -1) + self._support = Input(bind_support_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._support) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.bind_support_fc() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def support(self): - """Allows to connect support input to the operator + """Allows to connect support input to the operator. 
- - pindoc: meshed region or a support of the field + Meshed region or a support of the field Parameters ---------- - my_support : MeshedRegion, AbstractFieldSupport, + my_support : MeshedRegion or AbstractFieldSupport Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.bind_support_fc() >>> op.inputs.support.connect(my_support) - >>> #or + >>> # or >>> op.inputs.support(my_support) - """ return self._support + class OutputsBindSupportFc(_Outputs): - """Intermediate class used to get outputs from bind_support_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.bind_support_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + bind_support_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.bind_support_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(bind_support_fc._spec().outputs, op) - self._fields_container = Output(bind_support_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(bind_support_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.bind_support_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/utility/change_location.py b/ansys/dpf/core/operators/utility/change_location.py index 012560e1563..872cf835006 100644 --- a/ansys/dpf/core/operators/utility/change_location.py +++ b/ansys/dpf/core/operators/utility/change_location.py @@ -1,66 +1,104 @@ """ change_location =============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "utility" category -""" class change_location(Operator): """change the location of a field. - available inputs: - - field (Field) - - new_location (str) + Parameters + ---------- + field : Field + new_location : str + New location of the output field ex 'nodal', + 'elementalnodal', 'elemental'... 
+ - available outputs: - - field (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.utility.change_location() - >>> # Instantiate operator - >>> op = dpf.operators.utility.change_location() + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_new_location = str() + >>> op.inputs.new_location.connect(my_new_location) - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_new_location = str() - >>> op.inputs.new_location.connect(my_new_location) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.change_location( + ... field=my_field, + ... new_location=my_new_location, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.utility.change_location(field=my_field,new_location=my_new_location) + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, field=None, new_location=None, config=None, server=None): - super().__init__(name="change_location", config = config, server = server) + super().__init__(name="change_location", config=config, server=server) self._inputs = InputsChangeLocation(self) self._outputs = OutputsChangeLocation(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if new_location !=None: + if new_location is not None: self.inputs.new_location.connect(new_location) @staticmethod def _spec(): - spec = Specification(description="""change the location of a field.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document=""""""), - 1 : PinSpecification(name = "new_location", type_names=["string"], optional=False, document="""new location of the output 
field ex 'Nodal', 'ElementalNodal', 'Elemental'...""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """change the location of a field.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="new_location", + type_names=["string"], + optional=False, + document="""New location of the output field ex 'nodal', + 'elementalnodal', 'elemental'...""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "change_location") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="change_location", server=server) @property def inputs(self): @@ -68,117 +106,112 @@ def inputs(self): Returns -------- - inputs : InputsChangeLocation + inputs : InputsChangeLocation """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsChangeLocation + outputs : OutputsChangeLocation """ return super().outputs -#internal name: change_location -#scripting name: change_location class InputsChangeLocation(_Inputs): - """Intermediate class used to connect user inputs to change_location operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.change_location() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_new_location = str() - >>> op.inputs.new_location.connect(my_new_location) + """Intermediate class used to connect user inputs to + change_location operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.change_location() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_new_location = str() + >>> op.inputs.new_location.connect(my_new_location) """ + def __init__(self, op: Operator): super().__init__(change_location._spec().inputs, op) - self._field = Input(change_location._spec().input_pin(0), 0, op, -1) + self._field = Input(change_location._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._new_location = Input(change_location._spec().input_pin(1), 1, op, -1) + self._new_location = Input(change_location._spec().input_pin(1), 1, op, -1) self._inputs.append(self._new_location) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. 
Parameters ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.change_location() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def new_location(self): - """Allows to connect new_location input to the operator + """Allows to connect new_location input to the operator. - - pindoc: new location of the output field ex 'Nodal', 'ElementalNodal', 'Elemental'... + New location of the output field ex 'nodal', + 'elementalnodal', 'elemental'... Parameters ---------- - my_new_location : str, + my_new_location : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.change_location() >>> op.inputs.new_location.connect(my_new_location) - >>> #or + >>> # or >>> op.inputs.new_location(my_new_location) - """ return self._new_location + class OutputsChangeLocation(_Outputs): - """Intermediate class used to get outputs from change_location operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.change_location() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + change_location operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.change_location() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(change_location._spec().outputs, op) - self._field = Output(change_location._spec().output_pin(0), 0, op) + self._field = Output(change_location._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.change_location() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/utility/change_shell_layers.py b/ansys/dpf/core/operators/utility/change_shell_layers.py index e891a674bea..db48f72826b 100644 --- a/ansys/dpf/core/operators/utility/change_shell_layers.py +++ b/ansys/dpf/core/operators/utility/change_shell_layers.py @@ -1,66 +1,109 @@ """ change_shell_layers -=================== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.outputs import _modify_output_spec_with_one_type from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.FEMutils plugin, from "utility" category -""" class change_shell_layers(Operator): - """Extract the expected shell layers from the input fields, if the fields contain only one layer then it returns the input fields - - available inputs: - - fields_container (FieldsContainer, Field) - - e_shell_layer (int) - - available outputs: - - fields_container (FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.utility.change_shell_layers() - - >>> # Make input connections - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_e_shell_layer = int() - >>> op.inputs.e_shell_layer.connect(my_e_shell_layer) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.utility.change_shell_layers(fields_container=my_fields_container,e_shell_layer=my_e_shell_layer) + """Extract the expected shell layers from the input fields, if the fields + contain only one layer then it returns the input fields + + Parameters + ---------- + fields_container : FieldsContainer or Field + e_shell_layer : int + 0:top, 1: bottom, 2: bottomtop, 3:mid, + 4:bottomtopmid + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.change_shell_layers() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_e_shell_layer = 
int() + >>> op.inputs.e_shell_layer.connect(my_e_shell_layer) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.change_shell_layers( + ... fields_container=my_fields_container, + ... e_shell_layer=my_e_shell_layer, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" - def __init__(self, fields_container=None, e_shell_layer=None, config=None, server=None): - super().__init__(name="change_shellLayers", config = config, server = server) + def __init__( + self, fields_container=None, e_shell_layer=None, config=None, server=None + ): + super().__init__(name="change_shellLayers", config=config, server=server) self._inputs = InputsChangeShellLayers(self) self._outputs = OutputsChangeShellLayers(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if e_shell_layer !=None: + if e_shell_layer is not None: self.inputs.e_shell_layer.connect(e_shell_layer) @staticmethod def _spec(): - spec = Specification(description="""Extract the expected shell layers from the input fields, if the fields contain only one layer then it returns the input fields""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container","field"], optional=False, document=""""""), - 1 : PinSpecification(name = "e_shell_layer", type_names=["int32"], optional=False, document="""0:Top, 1: Bottom, 2: BottomTop, 3:Mid, 4:BottomTopMid""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Extract the expected shell layers from the input fields, if the fields + contain only one layer then it returns the input fields""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: 
PinSpecification( + name="fields_container", + type_names=["fields_container", "field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="e_shell_layer", + type_names=["int32", "enum dataProcessing::EShellLayers"], + optional=False, + document="""0:top, 1: bottom, 2: bottomtop, 3:mid, + 4:bottomtopmid""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container", "field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "change_shellLayers") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="change_shellLayers", server=server) @property def inputs(self): @@ -68,117 +111,111 @@ def inputs(self): Returns -------- - inputs : InputsChangeShellLayers + inputs : InputsChangeShellLayers """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsChangeShellLayers + outputs : OutputsChangeShellLayers """ return super().outputs -#internal name: change_shellLayers -#scripting name: change_shell_layers class InputsChangeShellLayers(_Inputs): - """Intermediate class used to connect user inputs to change_shell_layers operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.change_shell_layers() - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_e_shell_layer = int() - >>> op.inputs.e_shell_layer.connect(my_e_shell_layer) + """Intermediate class used to connect user inputs to + change_shell_layers operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.change_shell_layers() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_e_shell_layer = int() + >>> op.inputs.e_shell_layer.connect(my_e_shell_layer) """ + def __init__(self, op: Operator): super().__init__(change_shell_layers._spec().inputs, op) - self._fields_container = Input(change_shell_layers._spec().input_pin(0), 0, op, -1) + self._fields_container = Input( + change_shell_layers._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._fields_container) - self._e_shell_layer = Input(change_shell_layers._spec().input_pin(1), 1, op, -1) + self._e_shell_layer = Input(change_shell_layers._spec().input_pin(1), 1, op, -1) self._inputs.append(self._e_shell_layer) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. Parameters ---------- - my_fields_container : FieldsContainer, Field, + my_fields_container : FieldsContainer or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.change_shell_layers() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def e_shell_layer(self): - """Allows to connect e_shell_layer input to the operator + """Allows to connect e_shell_layer input to the operator. 
- - pindoc: 0:Top, 1: Bottom, 2: BottomTop, 3:Mid, 4:BottomTopMid + 0:top, 1: bottom, 2: bottomtop, 3:mid, + 4:bottomtopmid Parameters ---------- - my_e_shell_layer : int, + my_e_shell_layer : int Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.change_shell_layers() >>> op.inputs.e_shell_layer.connect(my_e_shell_layer) - >>> #or + >>> # or >>> op.inputs.e_shell_layer(my_e_shell_layer) - """ return self._e_shell_layer + class OutputsChangeShellLayers(_Outputs): - """Intermediate class used to get outputs from change_shell_layers operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.change_shell_layers() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + change_shell_layers operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.change_shell_layers() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(change_shell_layers._spec().outputs, op) - self._fields_container = Output(change_shell_layers._spec().output_pin(0), 0, op) - self._outputs.append(self._fields_container) - - @property - def fields_container(self): - """Allows to get fields_container output of the operator - - - Returns - ---------- - my_fields_container : FieldsContainer, - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.change_shell_layers() - >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ - return self._fields_container - + self.fields_container_as_fields_container = Output( + _modify_output_spec_with_one_type( + change_shell_layers._spec().output_pin(0), "fields_container" + ), + 0, + op, + ) + self._outputs.append(self.fields_container_as_fields_container) + self.fields_container_as_field = Output( + _modify_output_spec_with_one_type( + change_shell_layers._spec().output_pin(0), "field" + ), + 0, + op, + ) + self._outputs.append(self.fields_container_as_field) diff --git a/ansys/dpf/core/operators/utility/default_value.py b/ansys/dpf/core/operators/utility/default_value.py new file mode 100644 index 00000000000..e95a58ce0d0 --- /dev/null +++ b/ansys/dpf/core/operators/utility/default_value.py @@ -0,0 +1,212 @@ +""" +default_value +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class default_value(Operator): + """default return value from input pin 1 to output pin 0 if there is + nothing on input pin 0. + + Parameters + ---------- + forced_value : optional + default_value : + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.default_value() + + >>> # Make input connections + >>> my_forced_value = dpf.() + >>> op.inputs.forced_value.connect(my_forced_value) + >>> my_default_value = dpf.() + >>> op.inputs.default_value.connect(my_default_value) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.default_value( + ... forced_value=my_forced_value, + ... default_value=my_default_value, + ... 
) + + >>> # Get output data + >>> result_output = op.outputs.output() + """ + + def __init__(self, forced_value=None, default_value=None, config=None, server=None): + super().__init__(name="default_value", config=config, server=server) + self._inputs = InputsDefaultValue(self) + self._outputs = OutputsDefaultValue(self) + if forced_value is not None: + self.inputs.forced_value.connect(forced_value) + if default_value is not None: + self.inputs.default_value.connect(default_value) + + @staticmethod + def _spec(): + description = """default return value from input pin 1 to output pin 0 if there is + nothing on input pin 0.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="forced_value", + type_names=["any"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="default_value", + type_names=["any"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="output", + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="default_value", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsDefaultValue + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsDefaultValue + """ + return super().outputs + + +class InputsDefaultValue(_Inputs): + """Intermediate class used to connect user inputs to + default_value operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.default_value() + >>> my_forced_value = dpf.() + >>> op.inputs.forced_value.connect(my_forced_value) + >>> my_default_value = dpf.() + >>> op.inputs.default_value.connect(my_default_value) + """ + + def __init__(self, op: Operator): + super().__init__(default_value._spec().inputs, op) + self._forced_value = Input(default_value._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._forced_value) + self._default_value = Input(default_value._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._default_value) + + @property + def forced_value(self): + """Allows to connect forced_value input to the operator. + + Parameters + ---------- + my_forced_value : + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.default_value() + >>> op.inputs.forced_value.connect(my_forced_value) + >>> # or + >>> op.inputs.forced_value(my_forced_value) + """ + return self._forced_value + + @property + def default_value(self): + """Allows to connect default_value input to the operator. 
+ + Parameters + ---------- + my_default_value : + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.default_value() + >>> op.inputs.default_value.connect(my_default_value) + >>> # or + >>> op.inputs.default_value(my_default_value) + """ + return self._default_value + + +class OutputsDefaultValue(_Outputs): + """Intermediate class used to get outputs from + default_value operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.default_value() + >>> # Connect inputs : op.inputs. ... + >>> result_output = op.outputs.output() + """ + + def __init__(self, op: Operator): + super().__init__(default_value._spec().outputs, op) + self._output = Output(default_value._spec().output_pin(0), 0, op) + self._outputs.append(self._output) + + @property + def output(self): + """Allows to get output output of the operator + + Returns + ---------- + my_output : + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.default_value() + >>> # Connect inputs : op.inputs. ... + >>> result_output = op.outputs.output() + """ # noqa: E501 + return self._output diff --git a/ansys/dpf/core/operators/utility/extract_field.py b/ansys/dpf/core/operators/utility/extract_field.py index 3ce58cf35ce..c74f979cd57 100644 --- a/ansys/dpf/core/operators/utility/extract_field.py +++ b/ansys/dpf/core/operators/utility/extract_field.py @@ -1,66 +1,107 @@ """ extract_field -============= +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "utility" category -""" class extract_field(Operator): - """Extract the fields at the indeces defined in the vector (in 1) form the fields container (in:0). - - available inputs: - - fields_container (Field, FieldsContainer) - - indeces (list) (optional) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.utility.extract_field() - - >>> # Make input connections - >>> my_fields_container = dpf.Field() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_indeces = dpf.list() - >>> op.inputs.indeces.connect(my_indeces) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.utility.extract_field(fields_container=my_fields_container,indeces=my_indeces) + """Extract the fields at the indeces defined in the vector (in 1) form + the fields container (in:0). + + Parameters + ---------- + fields_container : Field or FieldsContainer + If a field is in input, it is passed on as + output + indeces : optional + Default is the first field + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.extract_field() + + >>> # Make input connections + >>> my_fields_container = dpf.Field() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_indeces = dpf.() + >>> op.inputs.indeces.connect(my_indeces) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.extract_field( + ... 
fields_container=my_fields_container, + ... indeces=my_indeces, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, fields_container=None, indeces=None, config=None, server=None): - super().__init__(name="ExtractFromFC", config = config, server = server) + super().__init__(name="ExtractFromFC", config=config, server=server) self._inputs = InputsExtractField(self) self._outputs = OutputsExtractField(self) - if fields_container !=None: + if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if indeces !=None: + if indeces is not None: self.inputs.indeces.connect(indeces) @staticmethod def _spec(): - spec = Specification(description="""Extract the fields at the indeces defined in the vector (in 1) form the fields container (in:0).""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["field","fields_container"], optional=False, document="""if a field is in input, it is passed on as output"""), - 1 : PinSpecification(name = "indeces", type_names=["vector"], optional=True, document="""default is the first field""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Extract the fields at the indeces defined in the vector (in 1) form + the fields container (in:0).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["field", "fields_container"], + optional=False, + document="""If a field is in input, it is passed on as + output""", + ), + 1: PinSpecification( + name="indeces", + type_names=["vector"], + optional=True, + document="""Default is the first field""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return 
spec - @staticmethod - def default_config(): - return Operator.default_config(name = "ExtractFromFC") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="ExtractFromFC", server=server) @property def inputs(self): @@ -68,119 +109,114 @@ def inputs(self): Returns -------- - inputs : InputsExtractField + inputs : InputsExtractField """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsExtractField + outputs : OutputsExtractField """ return super().outputs -#internal name: ExtractFromFC -#scripting name: extract_field class InputsExtractField(_Inputs): - """Intermediate class used to connect user inputs to extract_field operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.extract_field() - >>> my_fields_container = dpf.Field() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_indeces = dpf.list() - >>> op.inputs.indeces.connect(my_indeces) + """Intermediate class used to connect user inputs to + extract_field operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.extract_field() + >>> my_fields_container = dpf.Field() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_indeces = dpf.() + >>> op.inputs.indeces.connect(my_indeces) """ + def __init__(self, op: Operator): super().__init__(extract_field._spec().inputs, op) - self._fields_container = Input(extract_field._spec().input_pin(0), 0, op, -1) + self._fields_container = Input(extract_field._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields_container) - self._indeces = Input(extract_field._spec().input_pin(1), 1, op, -1) + self._indeces = Input(extract_field._spec().input_pin(1), 1, op, -1) self._inputs.append(self._indeces) @property def fields_container(self): - """Allows to connect fields_container input to the operator + """Allows to connect fields_container input to the operator. - - pindoc: if a field is in input, it is passed on as output + If a field is in input, it is passed on as + output Parameters ---------- - my_fields_container : Field, FieldsContainer, + my_fields_container : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.extract_field() >>> op.inputs.fields_container.connect(my_fields_container) - >>> #or + >>> # or >>> op.inputs.fields_container(my_fields_container) - """ return self._fields_container @property def indeces(self): - """Allows to connect indeces input to the operator + """Allows to connect indeces input to the operator. 
- - pindoc: default is the first field + Default is the first field Parameters ---------- - my_indeces : list, + my_indeces : Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.extract_field() >>> op.inputs.indeces.connect(my_indeces) - >>> #or + >>> # or >>> op.inputs.indeces(my_indeces) - """ return self._indeces + class OutputsExtractField(_Outputs): - """Intermediate class used to get outputs from extract_field operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.extract_field() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + extract_field operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.extract_field() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(extract_field._spec().outputs, op) - self._field = Output(extract_field._spec().output_pin(0), 0, op) + self._field = Output(extract_field._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.extract_field() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/utility/extract_time_freq.py b/ansys/dpf/core/operators/utility/extract_time_freq.py new file mode 100644 index 00000000000..30e67b72af2 --- /dev/null +++ b/ansys/dpf/core/operators/utility/extract_time_freq.py @@ -0,0 +1,213 @@ +""" +extract_time_freq +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class extract_time_freq(Operator): + """Extract modes from a time freq support + + Parameters + ---------- + time_freq_support : TimeFreqSupport + set_id : int + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.extract_time_freq() + + >>> # Make input connections + >>> my_time_freq_support = dpf.TimeFreqSupport() + >>> op.inputs.time_freq_support.connect(my_time_freq_support) + >>> my_set_id = int() + >>> op.inputs.set_id.connect(my_set_id) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.extract_time_freq( + ... time_freq_support=my_time_freq_support, + ... set_id=my_set_id, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__(self, time_freq_support=None, set_id=None, config=None, server=None): + super().__init__(name="extract_time_freq", config=config, server=server) + self._inputs = InputsExtractTimeFreq(self) + self._outputs = OutputsExtractTimeFreq(self) + if time_freq_support is not None: + self.inputs.time_freq_support.connect(time_freq_support) + if set_id is not None: + self.inputs.set_id.connect(set_id) + + @staticmethod + def _spec(): + description = """Extract modes from a time freq support""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_freq_support", + type_names=["time_freq_support"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="set_id", + type_names=["int32", "vector"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + 
document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="extract_time_freq", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsExtractTimeFreq + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsExtractTimeFreq + """ + return super().outputs + + +class InputsExtractTimeFreq(_Inputs): + """Intermediate class used to connect user inputs to + extract_time_freq operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.extract_time_freq() + >>> my_time_freq_support = dpf.TimeFreqSupport() + >>> op.inputs.time_freq_support.connect(my_time_freq_support) + >>> my_set_id = int() + >>> op.inputs.set_id.connect(my_set_id) + """ + + def __init__(self, op: Operator): + super().__init__(extract_time_freq._spec().inputs, op) + self._time_freq_support = Input( + extract_time_freq._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._time_freq_support) + self._set_id = Input(extract_time_freq._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._set_id) + + @property + def time_freq_support(self): + """Allows to connect time_freq_support input to the operator. 
+ + Parameters + ---------- + my_time_freq_support : TimeFreqSupport + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.extract_time_freq() + >>> op.inputs.time_freq_support.connect(my_time_freq_support) + >>> # or + >>> op.inputs.time_freq_support(my_time_freq_support) + """ + return self._time_freq_support + + @property + def set_id(self): + """Allows to connect set_id input to the operator. + + Parameters + ---------- + my_set_id : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.extract_time_freq() + >>> op.inputs.set_id.connect(my_set_id) + >>> # or + >>> op.inputs.set_id(my_set_id) + """ + return self._set_id + + +class OutputsExtractTimeFreq(_Outputs): + """Intermediate class used to get outputs from + extract_time_freq operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.extract_time_freq() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() + """ + + def __init__(self, op: Operator): + super().__init__(extract_time_freq._spec().outputs, op) + self._field = Output(extract_time_freq._spec().output_pin(0), 0, op) + self._outputs.append(self._field) + + @property + def field(self): + """Allows to get field output of the operator + + Returns + ---------- + my_field : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.extract_time_freq() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() + """ # noqa: E501 + return self._field diff --git a/ansys/dpf/core/operators/utility/field_to_fc.py b/ansys/dpf/core/operators/utility/field_to_fc.py index dbf50ec1797..2fe12f759cf 100644 --- a/ansys/dpf/core/operators/utility/field_to_fc.py +++ b/ansys/dpf/core/operators/utility/field_to_fc.py @@ -1,60 +1,92 @@ """ field_to_fc -=========== +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "utility" category -""" class field_to_fc(Operator): """Create a field container containing the field in input. - available inputs: - - field (Field, FieldsContainer) + Parameters + ---------- + field : Field or FieldsContainer + If a fields container is set in input, it is + pass on as output. + - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.utility.field_to_fc() - >>> # Instantiate operator - >>> op = dpf.operators.utility.field_to_fc() + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.field_to_fc( + ... field=my_field, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.utility.field_to_fc(field=my_field) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, field=None, config=None, server=None): - super().__init__(name="InjectToFieldContainer", config = config, server = server) + super().__init__(name="InjectToFieldContainer", config=config, server=server) self._inputs = InputsFieldToFc(self) self._outputs = OutputsFieldToFc(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) @staticmethod def _spec(): - spec = Specification(description="""Create a field container containing the field in input.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""if a fields container is set in input, it is pass on as output.""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Create a field container containing the field in input.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""If a fields container is set in input, it is + pass on as output.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "InjectToFieldContainer") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="InjectToFieldContainer", server=server) @property def inputs(self): @@ -62,93 +94,90 @@ def inputs(self): Returns -------- - inputs : InputsFieldToFc + inputs : InputsFieldToFc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsFieldToFc + outputs : OutputsFieldToFc """ return super().outputs -#internal name: InjectToFieldContainer -#scripting name: field_to_fc class InputsFieldToFc(_Inputs): - """Intermediate class used to connect user inputs to field_to_fc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.field_to_fc() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + """Intermediate class used to connect user inputs to + field_to_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.field_to_fc() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) """ + def __init__(self, op: Operator): super().__init__(field_to_fc._spec().inputs, op) - self._field = Input(field_to_fc._spec().input_pin(0), 0, op, -1) + self._field = Input(field_to_fc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. - - pindoc: if a fields container is set in input, it is pass on as output. + If a fields container is set in input, it is + pass on as output. 
Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.field_to_fc() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field + class OutputsFieldToFc(_Outputs): - """Intermediate class used to get outputs from field_to_fc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.field_to_fc() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + field_to_fc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.field_to_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(field_to_fc._spec().outputs, op) - self._fields_container = Output(field_to_fc._spec().output_pin(0), 0, op) + self._fields_container = Output(field_to_fc._spec().output_pin(0), 0, op) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.field_to_fc() >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/utility/forward.py b/ansys/dpf/core/operators/utility/forward.py index 7c861017b90..87551db2c82 100644 --- a/ansys/dpf/core/operators/utility/forward.py +++ b/ansys/dpf/core/operators/utility/forward.py @@ -1,60 +1,90 @@ """ forward -======= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "utility" category -""" class forward(Operator): """Return all the inputs as outputs. - available inputs: - - any (Any) + Parameters + ---------- + any : Any + Any type of input + - available outputs: - - any () + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.utility.forward() - >>> # Instantiate operator - >>> op = dpf.operators.utility.forward() + >>> # Make input connections + >>> my_any = dpf.Any() + >>> op.inputs.any.connect(my_any) - >>> # Make input connections - >>> my_any = dpf.Any() - >>> op.inputs.any.connect(my_any) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.forward( + ... any=my_any, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.utility.forward(any=my_any) + >>> # Get output data + >>> result_any = op.outputs.any() + """ - >>> # Get output data - >>> result_any = op.outputs.any()""" def __init__(self, any=None, config=None, server=None): - super().__init__(name="forward", config = config, server = server) + super().__init__(name="forward", config=config, server=server) self._inputs = InputsForward(self) self._outputs = OutputsForward(self) - if any !=None: + if any is not None: self.inputs.any.connect(any) @staticmethod def _spec(): - spec = Specification(description="""Return all the inputs as outputs.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "any", type_names=["any"], optional=False, document="""any type of input""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "any", type_names=[], optional=False, document="""same types as inputs""")}) + description = """Return all the inputs as outputs.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="any", + type_names=["any"], + optional=False, + document="""Any type of input""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="any", + type_names=["any"], + optional=False, + document="""Same types as inputs""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "forward") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="forward", server=server) @property def inputs(self): @@ -62,72 +92,89 @@ def inputs(self): Returns -------- - inputs : InputsForward + inputs : InputsForward """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsForward + outputs : OutputsForward """ return super().outputs -#internal name: forward -#scripting name: forward class InputsForward(_Inputs): - """Intermediate class used to connect user inputs to forward operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.forward() - >>> my_any = dpf.Any() - >>> op.inputs.any.connect(my_any) + """Intermediate class used to connect user inputs to + forward operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.forward() + >>> my_any = dpf.Any() + >>> op.inputs.any.connect(my_any) """ + def __init__(self, op: Operator): super().__init__(forward._spec().inputs, op) - self._any = Input(forward._spec().input_pin(0), 0, op, -1) + self._any = Input(forward._spec().input_pin(0), 0, op, -1) self._inputs.append(self._any) @property def any(self): - """Allows to connect any input to the operator + """Allows to connect any input to the operator. - - pindoc: any type of input + Any type of input Parameters ---------- - my_any : Any, + my_any : Any Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.forward() >>> op.inputs.any.connect(my_any) - >>> #or + >>> # or >>> op.inputs.any(my_any) - """ return self._any -class OutputsForward(_Outputs): - """Intermediate class used to get outputs from forward operator - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.forward() - >>> # Connect inputs : op.inputs. ... +class OutputsForward(_Outputs): + """Intermediate class used to get outputs from + forward operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.forward() + >>> # Connect inputs : op.inputs. ... + >>> result_any = op.outputs.any() """ + def __init__(self, op: Operator): super().__init__(forward._spec().outputs, op) - pass + self._any = Output(forward._spec().output_pin(0), 0, op) + self._outputs.append(self._any) + + @property + def any(self): + """Allows to get any output of the operator + + Returns + ---------- + my_any : Any + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.forward() + >>> # Connect inputs : op.inputs. ... + >>> result_any = op.outputs.any() + """ # noqa: E501 + return self._any diff --git a/ansys/dpf/core/operators/utility/forward_field.py b/ansys/dpf/core/operators/utility/forward_field.py index e2169bc3962..a5bc488cecb 100644 --- a/ansys/dpf/core/operators/utility/forward_field.py +++ b/ansys/dpf/core/operators/utility/forward_field.py @@ -1,60 +1,92 @@ """ forward_field -============= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "utility" category -""" class forward_field(Operator): """Return the input field or fields container. 
- available inputs: - - field (Field, FieldsContainer) + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + - available outputs: - - field (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.utility.forward_field() - >>> # Instantiate operator - >>> op = dpf.operators.utility.forward_field() + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.forward_field( + ... field=my_field, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.utility.forward_field(field=my_field) + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, field=None, config=None, server=None): - super().__init__(name="forward_field", config = config, server = server) + super().__init__(name="forward_field", config=config, server=server) self._inputs = InputsForwardField(self) self._outputs = OutputsForwardField(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) @staticmethod def _spec(): - spec = Specification(description="""Return the input field or fields container.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Return the input field or fields container.""" + spec = Specification( + 
description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "forward_field") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="forward_field", server=server) @property def inputs(self): @@ -62,93 +94,90 @@ def inputs(self): Returns -------- - inputs : InputsForwardField + inputs : InputsForwardField """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsForwardField + outputs : OutputsForwardField """ return super().outputs -#internal name: forward_field -#scripting name: forward_field class InputsForwardField(_Inputs): - """Intermediate class used to connect user inputs to forward_field operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.forward_field() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + """Intermediate class used to connect user inputs to + forward_field operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.forward_field() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) """ + def __init__(self, op: Operator): super().__init__(forward_field._spec().inputs, op) - self._field = Input(forward_field._spec().input_pin(0), 0, op, -1) + self._field = Input(forward_field._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.forward_field() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field + class OutputsForwardField(_Outputs): - """Intermediate class used to get outputs from forward_field operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.forward_field() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + forward_field operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.forward_field() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(forward_field._spec().outputs, op) - self._field = Output(forward_field._spec().output_pin(0), 0, op) + self._field = Output(forward_field._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.forward_field() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/utility/forward_fields_container.py b/ansys/dpf/core/operators/utility/forward_fields_container.py index c8c6204226f..aeb9ab3f4cc 100644 --- a/ansys/dpf/core/operators/utility/forward_fields_container.py +++ b/ansys/dpf/core/operators/utility/forward_fields_container.py @@ -1,60 +1,89 @@ """ forward_fields_container -======================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "utility" category -""" class forward_fields_container(Operator): """Return the input field or fields container. 
- available inputs: - - fields (FieldsContainer, Field) + Parameters + ---------- + fields : FieldsContainer or Field + - available outputs: - - fields_container (FieldsContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.utility.forward_fields_container() - >>> # Instantiate operator - >>> op = dpf.operators.utility.forward_fields_container() + >>> # Make input connections + >>> my_fields = dpf.FieldsContainer() + >>> op.inputs.fields.connect(my_fields) - >>> # Make input connections - >>> my_fields = dpf.FieldsContainer() - >>> op.inputs.fields.connect(my_fields) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.forward_fields_container( + ... fields=my_fields, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.utility.forward_fields_container(fields=my_fields) + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container()""" def __init__(self, fields=None, config=None, server=None): - super().__init__(name="forward_fc", config = config, server = server) + super().__init__(name="forward_fc", config=config, server=server) self._inputs = InputsForwardFieldsContainer(self) self._outputs = OutputsForwardFieldsContainer(self) - if fields !=None: + if fields is not None: self.inputs.fields.connect(fields) @staticmethod def _spec(): - spec = Specification(description="""Return the input field or fields container.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "fields", type_names=["fields_container","field"], optional=False, document="""""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "fields_container", type_names=["fields_container"], optional=False, document="""""")}) + description = """Return the input field or 
fields container.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields", + type_names=["fields_container", "field"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "forward_fc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="forward_fc", server=server) @property def inputs(self): @@ -62,91 +91,89 @@ def inputs(self): Returns -------- - inputs : InputsForwardFieldsContainer + inputs : InputsForwardFieldsContainer """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsForwardFieldsContainer + outputs : OutputsForwardFieldsContainer """ return super().outputs -#internal name: forward_fc -#scripting name: forward_fields_container class InputsForwardFieldsContainer(_Inputs): - """Intermediate class used to connect user inputs to forward_fields_container operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.forward_fields_container() - >>> my_fields = dpf.FieldsContainer() - >>> op.inputs.fields.connect(my_fields) + """Intermediate class used to connect user inputs to + forward_fields_container operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.forward_fields_container() + >>> my_fields = dpf.FieldsContainer() + >>> op.inputs.fields.connect(my_fields) """ + def __init__(self, op: Operator): super().__init__(forward_fields_container._spec().inputs, op) - self._fields = Input(forward_fields_container._spec().input_pin(0), 0, op, -1) + self._fields = Input(forward_fields_container._spec().input_pin(0), 0, op, -1) self._inputs.append(self._fields) @property def fields(self): - """Allows to connect fields input to the operator + """Allows to connect fields input to the operator. Parameters ---------- - my_fields : FieldsContainer, Field, + my_fields : FieldsContainer or Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.forward_fields_container() >>> op.inputs.fields.connect(my_fields) - >>> #or + >>> # or >>> op.inputs.fields(my_fields) - """ return self._fields + class OutputsForwardFieldsContainer(_Outputs): - """Intermediate class used to get outputs from forward_fields_container operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.forward_fields_container() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() + """Intermediate class used to get outputs from + forward_fields_container operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.forward_fields_container() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() """ + def __init__(self, op: Operator): super().__init__(forward_fields_container._spec().outputs, op) - self._fields_container = Output(forward_fields_container._spec().output_pin(0), 0, op) + self._fields_container = Output( + forward_fields_container._spec().output_pin(0), 0, op + ) self._outputs.append(self._fields_container) @property def fields_container(self): """Allows to get fields_container output of the operator - Returns ---------- - my_fields_container : FieldsContainer, + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.forward_fields_container() >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 return self._fields_container - diff --git a/ansys/dpf/core/operators/utility/forward_meshes_container.py b/ansys/dpf/core/operators/utility/forward_meshes_container.py index bf316461639..ec879c8b69c 100644 --- a/ansys/dpf/core/operators/utility/forward_meshes_container.py +++ b/ansys/dpf/core/operators/utility/forward_meshes_container.py @@ -1,66 +1,108 @@ """ forward_meshes_container -======================== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "utility" category -""" class forward_meshes_container(Operator): """Return the input mesh or meshes container into a meshes container. 
- available inputs: - - meshes (MeshesContainer, MeshedRegion) - - default_label (str) (optional) + Parameters + ---------- + meshes : MeshesContainer or MeshedRegion + default_label : str, optional + This default label is used if a new meshes + container needs to be created + (default is unknown) + - available outputs: - - meshes_container (MeshesContainer) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.utility.forward_meshes_container() - >>> # Instantiate operator - >>> op = dpf.operators.utility.forward_meshes_container() + >>> # Make input connections + >>> my_meshes = dpf.MeshesContainer() + >>> op.inputs.meshes.connect(my_meshes) + >>> my_default_label = str() + >>> op.inputs.default_label.connect(my_default_label) - >>> # Make input connections - >>> my_meshes = dpf.MeshesContainer() - >>> op.inputs.meshes.connect(my_meshes) - >>> my_default_label = str() - >>> op.inputs.default_label.connect(my_default_label) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.forward_meshes_container( + ... meshes=my_meshes, + ... default_label=my_default_label, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.utility.forward_meshes_container(meshes=my_meshes,default_label=my_default_label) + >>> # Get output data + >>> result_meshes_container = op.outputs.meshes_container() + """ - >>> # Get output data - >>> result_meshes_container = op.outputs.meshes_container()""" def __init__(self, meshes=None, default_label=None, config=None, server=None): - super().__init__(name="forward_meshes_container", config = config, server = server) + super().__init__(name="forward_meshes_container", config=config, server=server) self._inputs = InputsForwardMeshesContainer(self) self._outputs = OutputsForwardMeshesContainer(self) - if meshes !=None: + if meshes is not None: self.inputs.meshes.connect(meshes) - if default_label !=None: + if default_label is not None: self.inputs.default_label.connect(default_label) @staticmethod def _spec(): - spec = Specification(description="""Return the input mesh or meshes container into a meshes container.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "meshes", type_names=["meshes_container","abstract_meshed_region"], optional=False, document=""""""), - 1 : PinSpecification(name = "default_label", type_names=["string"], optional=True, document="""this default label is used if a new meshes container needs to be created (default is unknown)""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "meshes_container", type_names=["meshes_container"], optional=False, document="""""")}) + description = ( + """Return the input mesh or meshes container into a meshes container.""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="meshes", + type_names=["meshes_container", "abstract_meshed_region"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="default_label", + type_names=["string"], + optional=True, + document="""This default label is used if a new meshes + container needs to 
be created + (default is unknown)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="meshes_container", + type_names=["meshes_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "forward_meshes_container") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="forward_meshes_container", server=server) @property def inputs(self): @@ -68,117 +110,117 @@ def inputs(self): Returns -------- - inputs : InputsForwardMeshesContainer + inputs : InputsForwardMeshesContainer """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsForwardMeshesContainer + outputs : OutputsForwardMeshesContainer """ return super().outputs -#internal name: forward_meshes_container -#scripting name: forward_meshes_container class InputsForwardMeshesContainer(_Inputs): - """Intermediate class used to connect user inputs to forward_meshes_container operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.forward_meshes_container() - >>> my_meshes = dpf.MeshesContainer() - >>> op.inputs.meshes.connect(my_meshes) - >>> my_default_label = str() - >>> op.inputs.default_label.connect(my_default_label) + """Intermediate class used to connect user inputs to + forward_meshes_container operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.forward_meshes_container() + >>> my_meshes = dpf.MeshesContainer() + >>> op.inputs.meshes.connect(my_meshes) + >>> my_default_label = str() + >>> op.inputs.default_label.connect(my_default_label) """ + def __init__(self, op: Operator): super().__init__(forward_meshes_container._spec().inputs, op) - self._meshes = Input(forward_meshes_container._spec().input_pin(0), 0, op, -1) + self._meshes = Input(forward_meshes_container._spec().input_pin(0), 0, op, -1) self._inputs.append(self._meshes) - self._default_label = Input(forward_meshes_container._spec().input_pin(1), 1, op, -1) + self._default_label = Input( + forward_meshes_container._spec().input_pin(1), 1, op, -1 + ) self._inputs.append(self._default_label) @property def meshes(self): - """Allows to connect meshes input to the operator + """Allows to connect meshes input to the operator. Parameters ---------- - my_meshes : MeshesContainer, MeshedRegion, + my_meshes : MeshesContainer or MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.forward_meshes_container() >>> op.inputs.meshes.connect(my_meshes) - >>> #or + >>> # or >>> op.inputs.meshes(my_meshes) - """ return self._meshes @property def default_label(self): - """Allows to connect default_label input to the operator + """Allows to connect default_label input to the operator. 
- - pindoc: this default label is used if a new meshes container needs to be created (default is unknown) + This default label is used if a new meshes + container needs to be created + (default is unknown) Parameters ---------- - my_default_label : str, + my_default_label : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.forward_meshes_container() >>> op.inputs.default_label.connect(my_default_label) - >>> #or + >>> # or >>> op.inputs.default_label(my_default_label) - """ return self._default_label + class OutputsForwardMeshesContainer(_Outputs): - """Intermediate class used to get outputs from forward_meshes_container operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.forward_meshes_container() - >>> # Connect inputs : op.inputs. ... - >>> result_meshes_container = op.outputs.meshes_container() + """Intermediate class used to get outputs from + forward_meshes_container operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.forward_meshes_container() + >>> # Connect inputs : op.inputs. ... + >>> result_meshes_container = op.outputs.meshes_container() """ + def __init__(self, op: Operator): super().__init__(forward_meshes_container._spec().outputs, op) - self._meshes_container = Output(forward_meshes_container._spec().output_pin(0), 0, op) + self._meshes_container = Output( + forward_meshes_container._spec().output_pin(0), 0, op + ) self._outputs.append(self._meshes_container) @property def meshes_container(self): """Allows to get meshes_container output of the operator - Returns ---------- - my_meshes_container : MeshesContainer, + my_meshes_container : MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.forward_meshes_container() >>> # Connect inputs : op.inputs. ... 
- >>> result_meshes_container = op.outputs.meshes_container() - """ + >>> result_meshes_container = op.outputs.meshes_container() + """ # noqa: E501 return self._meshes_container - diff --git a/ansys/dpf/core/operators/utility/html_doc.py b/ansys/dpf/core/operators/utility/html_doc.py index f9a5b61a3bb..84679c73bb0 100644 --- a/ansys/dpf/core/operators/utility/html_doc.py +++ b/ansys/dpf/core/operators/utility/html_doc.py @@ -1,59 +1,85 @@ """ html_doc -======== +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "utility" category -""" class html_doc(Operator): - """Create dpf's html documentation. Only on windows, use deprecated doc for linux + """Create dpf's html documentation. Only on windows, use deprecated doc + for linux - available inputs: - - output_path (str) (optional) + Parameters + ---------- + output_path : str, optional + Default is {working + directory}/dataprocessingdoc.html - available outputs: + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.utility.html_doc() - >>> # Instantiate operator - >>> op = dpf.operators.utility.html_doc() + >>> # Make input connections + >>> my_output_path = str() + >>> op.inputs.output_path.connect(my_output_path) - >>> # Make input connections - >>> my_output_path = str() - >>> op.inputs.output_path.connect(my_output_path) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.html_doc( + ... output_path=my_output_path, + ... 
) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.utility.html_doc(output_path=my_output_path) + """ - >>> # Get output data""" def __init__(self, output_path=None, config=None, server=None): - super().__init__(name="html_doc", config = config, server = server) + super().__init__(name="html_doc", config=config, server=server) self._inputs = InputsHtmlDoc(self) self._outputs = OutputsHtmlDoc(self) - if output_path !=None: + if output_path is not None: self.inputs.output_path.connect(output_path) @staticmethod def _spec(): - spec = Specification(description="""Create dpf's html documentation. Only on windows, use deprecated doc for linux""", - map_input_pin_spec={ - 0 : PinSpecification(name = "output_path", type_names=["string"], optional=True, document="""default is {working directory}/dataProcessingDoc.html""")}, - map_output_pin_spec={ -}) + description = """Create dpf's html documentation. Only on windows, use deprecated doc + for linux""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="output_path", + type_names=["string"], + optional=True, + document="""Default is {working + directory}/dataprocessingdoc.html""", + ), + }, + map_output_pin_spec={}, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "html_doc") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="html_doc", server=server) @property def inputs(self): @@ -61,72 +87,70 @@ def inputs(self): Returns -------- - inputs : InputsHtmlDoc + inputs : InputsHtmlDoc """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsHtmlDoc + outputs : OutputsHtmlDoc """ return super().outputs -#internal name: html_doc -#scripting name: html_doc class InputsHtmlDoc(_Inputs): - """Intermediate class used to connect user inputs to html_doc operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.html_doc() - >>> my_output_path = str() - >>> op.inputs.output_path.connect(my_output_path) + """Intermediate class used to connect user inputs to + html_doc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.html_doc() + >>> my_output_path = str() + >>> op.inputs.output_path.connect(my_output_path) """ + def __init__(self, op: Operator): super().__init__(html_doc._spec().inputs, op) - self._output_path = Input(html_doc._spec().input_pin(0), 0, op, -1) + self._output_path = Input(html_doc._spec().input_pin(0), 0, op, -1) self._inputs.append(self._output_path) @property def output_path(self): - """Allows to connect output_path input to the operator + """Allows to connect output_path input to the operator. 
- - pindoc: default is {working directory}/dataProcessingDoc.html + Default is {working + directory}/dataprocessingdoc.html Parameters ---------- - my_output_path : str, + my_output_path : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.html_doc() >>> op.inputs.output_path.connect(my_output_path) - >>> #or + >>> # or >>> op.inputs.output_path(my_output_path) - """ return self._output_path -class OutputsHtmlDoc(_Outputs): - """Intermediate class used to get outputs from html_doc operator - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.html_doc() - >>> # Connect inputs : op.inputs. ... +class OutputsHtmlDoc(_Outputs): + """Intermediate class used to get outputs from + html_doc operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.html_doc() + >>> # Connect inputs : op.inputs. ... """ + def __init__(self, op: Operator): super().__init__(html_doc._spec().outputs, op) - pass - diff --git a/ansys/dpf/core/operators/utility/make_overall.py b/ansys/dpf/core/operators/utility/make_overall.py new file mode 100644 index 00000000000..619dbabb631 --- /dev/null +++ b/ansys/dpf/core/operators/utility/make_overall.py @@ -0,0 +1,211 @@ +""" +make_overall +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class make_overall(Operator): + """Extract a value from a field and make it overall scoping + + Parameters + ---------- + field : Field + id : int + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.make_overall() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_id = int() + >>> op.inputs.id.connect(my_id) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.make_overall( + ... field=my_field, + ... id=my_id, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__(self, field=None, id=None, config=None, server=None): + super().__init__(name="make_overall", config=config, server=server) + self._inputs = InputsMakeOverall(self) + self._outputs = OutputsMakeOverall(self) + if field is not None: + self.inputs.field.connect(field) + if id is not None: + self.inputs.id.connect(id) + + @staticmethod + def _spec(): + description = """Extract a value from a field and make it overall scoping""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="id", + type_names=["int32"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. 
+ + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="make_overall", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsMakeOverall + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsMakeOverall + """ + return super().outputs + + +class InputsMakeOverall(_Inputs): + """Intermediate class used to connect user inputs to + make_overall operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.make_overall() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_id = int() + >>> op.inputs.id.connect(my_id) + """ + + def __init__(self, op: Operator): + super().__init__(make_overall._spec().inputs, op) + self._field = Input(make_overall._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._field) + self._id = Input(make_overall._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._id) + + @property + def field(self): + """Allows to connect field input to the operator. + + Parameters + ---------- + my_field : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.make_overall() + >>> op.inputs.field.connect(my_field) + >>> # or + >>> op.inputs.field(my_field) + """ + return self._field + + @property + def id(self): + """Allows to connect id input to the operator. 
+ + Parameters + ---------- + my_id : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.make_overall() + >>> op.inputs.id.connect(my_id) + >>> # or + >>> op.inputs.id(my_id) + """ + return self._id + + +class OutputsMakeOverall(_Outputs): + """Intermediate class used to get outputs from + make_overall operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.make_overall() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() + """ + + def __init__(self, op: Operator): + super().__init__(make_overall._spec().outputs, op) + self._field = Output(make_overall._spec().output_pin(0), 0, op) + self._outputs.append(self._field) + + @property + def field(self): + """Allows to get field output of the operator + + Returns + ---------- + my_field : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.make_overall() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() + """ # noqa: E501 + return self._field diff --git a/ansys/dpf/core/operators/utility/merge_fields.py b/ansys/dpf/core/operators/utility/merge_fields.py new file mode 100644 index 00000000000..f6ba8e38101 --- /dev/null +++ b/ansys/dpf/core/operators/utility/merge_fields.py @@ -0,0 +1,311 @@ +""" +merge_fields +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class merge_fields(Operator): + """Take a set of fields and assemble them in a unique one + + Parameters + ---------- + sum_merge : bool, optional + Default is false. if true redundant + quantities are summed instead of + being ignored. 
+ merged_support : AbstractFieldSupport, optional + Already merged field support. + fields1 : Field + A vector of fields to merge or fields from + pin 0 to ... + fields2 : Field + A vector of fields to merge or fields from + pin 0 to ... + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.merge_fields() + + >>> # Make input connections + >>> my_sum_merge = bool() + >>> op.inputs.sum_merge.connect(my_sum_merge) + >>> my_merged_support = dpf.AbstractFieldSupport() + >>> op.inputs.merged_support.connect(my_merged_support) + >>> my_fields1 = dpf.Field() + >>> op.inputs.fields1.connect(my_fields1) + >>> my_fields2 = dpf.Field() + >>> op.inputs.fields2.connect(my_fields2) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.merge_fields( + ... sum_merge=my_sum_merge, + ... merged_support=my_merged_support, + ... fields1=my_fields1, + ... fields2=my_fields2, + ... ) + + >>> # Get output data + >>> result_merged_field = op.outputs.merged_field() + """ + + def __init__( + self, + sum_merge=None, + merged_support=None, + fields1=None, + fields2=None, + config=None, + server=None, + ): + super().__init__(name="merge::field", config=config, server=server) + self._inputs = InputsMergeFields(self) + self._outputs = OutputsMergeFields(self) + if sum_merge is not None: + self.inputs.sum_merge.connect(sum_merge) + if merged_support is not None: + self.inputs.merged_support.connect(merged_support) + if fields1 is not None: + self.inputs.fields1.connect(fields1) + if fields2 is not None: + self.inputs.fields2.connect(fields2) + + @staticmethod + def _spec(): + description = """Take a set of fields and assemble them in a unique one""" + spec = Specification( + description=description, + map_input_pin_spec={ + -2: PinSpecification( + name="sum_merge", + type_names=["bool"], + optional=True, + document="""Default is false. 
if true redundant + quantities are summed instead of + being ignored.""", + ), + -1: PinSpecification( + name="merged_support", + type_names=["abstract_field_support"], + optional=True, + document="""Already merged field support.""", + ), + 0: PinSpecification( + name="fields", + type_names=["field"], + optional=False, + document="""A vector of fields to merge or fields from + pin 0 to ...""", + ), + 1: PinSpecification( + name="fields", + type_names=["field"], + optional=False, + document="""A vector of fields to merge or fields from + pin 0 to ...""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="merged_field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="merge::field", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsMergeFields + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsMergeFields + """ + return super().outputs + + +class InputsMergeFields(_Inputs): + """Intermediate class used to connect user inputs to + merge_fields operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_fields() + >>> my_sum_merge = bool() + >>> op.inputs.sum_merge.connect(my_sum_merge) + >>> my_merged_support = dpf.AbstractFieldSupport() + >>> op.inputs.merged_support.connect(my_merged_support) + >>> my_fields1 = dpf.Field() + >>> op.inputs.fields1.connect(my_fields1) + >>> my_fields2 = dpf.Field() + >>> op.inputs.fields2.connect(my_fields2) + """ + + def __init__(self, op: Operator): + super().__init__(merge_fields._spec().inputs, op) + self._sum_merge = Input(merge_fields._spec().input_pin(-2), -2, op, -1) + self._inputs.append(self._sum_merge) + self._merged_support = Input(merge_fields._spec().input_pin(-1), -1, op, -1) + self._inputs.append(self._merged_support) + self._fields1 = Input(merge_fields._spec().input_pin(0), 0, op, 0) + self._inputs.append(self._fields1) + self._fields2 = Input(merge_fields._spec().input_pin(1), 1, op, 1) + self._inputs.append(self._fields2) + + @property + def sum_merge(self): + """Allows to connect sum_merge input to the operator. + + Default is false. if true redundant + quantities are summed instead of + being ignored. + + Parameters + ---------- + my_sum_merge : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_fields() + >>> op.inputs.sum_merge.connect(my_sum_merge) + >>> # or + >>> op.inputs.sum_merge(my_sum_merge) + """ + return self._sum_merge + + @property + def merged_support(self): + """Allows to connect merged_support input to the operator. + + Already merged field support. 
+ + Parameters + ---------- + my_merged_support : AbstractFieldSupport + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_fields() + >>> op.inputs.merged_support.connect(my_merged_support) + >>> # or + >>> op.inputs.merged_support(my_merged_support) + """ + return self._merged_support + + @property + def fields1(self): + """Allows to connect fields1 input to the operator. + + A vector of fields to merge or fields from + pin 0 to ... + + Parameters + ---------- + my_fields1 : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_fields() + >>> op.inputs.fields1.connect(my_fields1) + >>> # or + >>> op.inputs.fields1(my_fields1) + """ + return self._fields1 + + @property + def fields2(self): + """Allows to connect fields2 input to the operator. + + A vector of fields to merge or fields from + pin 0 to ... + + Parameters + ---------- + my_fields2 : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_fields() + >>> op.inputs.fields2.connect(my_fields2) + >>> # or + >>> op.inputs.fields2(my_fields2) + """ + return self._fields2 + + +class OutputsMergeFields(_Outputs): + """Intermediate class used to get outputs from + merge_fields operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_fields() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_merged_field = op.outputs.merged_field() + """ + + def __init__(self, op: Operator): + super().__init__(merge_fields._spec().outputs, op) + self._merged_field = Output(merge_fields._spec().output_pin(0), 0, op) + self._outputs.append(self._merged_field) + + @property + def merged_field(self): + """Allows to get merged_field output of the operator + + Returns + ---------- + my_merged_field : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_fields() + >>> # Connect inputs : op.inputs. ... + >>> result_merged_field = op.outputs.merged_field() + """ # noqa: E501 + return self._merged_field diff --git a/ansys/dpf/core/operators/utility/merge_fields_by_label.py b/ansys/dpf/core/operators/utility/merge_fields_by_label.py new file mode 100644 index 00000000000..dec26e803b6 --- /dev/null +++ b/ansys/dpf/core/operators/utility/merge_fields_by_label.py @@ -0,0 +1,346 @@ +""" +merge_fields_by_label +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class merge_fields_by_label(Operator): + """Take a fields container and merge its fields that share the same label + value. + + Parameters + ---------- + fields_container : FieldsContainer + label : str + Label identifier that should be merged. + merged_field_support : AbstractFieldSupport, optional + The fieldscontainer's support that has + already been merged. + sumMerge : bool, optional + Default is false. if true redundant + quantities are summed instead of + being ignored. 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.merge_fields_by_label() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_label = str() + >>> op.inputs.label.connect(my_label) + >>> my_merged_field_support = dpf.AbstractFieldSupport() + >>> op.inputs.merged_field_support.connect(my_merged_field_support) + >>> my_sumMerge = bool() + >>> op.inputs.sumMerge.connect(my_sumMerge) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.merge_fields_by_label( + ... fields_container=my_fields_container, + ... label=my_label, + ... merged_field_support=my_merged_field_support, + ... sumMerge=my_sumMerge, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + >>> result_merged_field_support = op.outputs.merged_field_support() + """ + + def __init__( + self, + fields_container=None, + label=None, + merged_field_support=None, + sumMerge=None, + config=None, + server=None, + ): + super().__init__( + name="merge::fields_container_label", config=config, server=server + ) + self._inputs = InputsMergeFieldsByLabel(self) + self._outputs = OutputsMergeFieldsByLabel(self) + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if label is not None: + self.inputs.label.connect(label) + if merged_field_support is not None: + self.inputs.merged_field_support.connect(merged_field_support) + if sumMerge is not None: + self.inputs.sumMerge.connect(sumMerge) + + @staticmethod + def _spec(): + description = """Take a fields container and merge its fields that share the same label + value.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 
1: PinSpecification( + name="label", + type_names=["string"], + optional=False, + document="""Label identifier that should be merged.""", + ), + 2: PinSpecification( + name="merged_field_support", + type_names=["abstract_field_support"], + optional=True, + document="""The fieldscontainer's support that has + already been merged.""", + ), + 3: PinSpecification( + name="sumMerge", + type_names=["bool"], + optional=True, + document="""Default is false. if true redundant + quantities are summed instead of + being ignored.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="merged_field_support", + type_names=["abstract_field_support"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config( + name="merge::fields_container_label", server=server + ) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsMergeFieldsByLabel + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsMergeFieldsByLabel + """ + return super().outputs + + +class InputsMergeFieldsByLabel(_Inputs): + """Intermediate class used to connect user inputs to + merge_fields_by_label operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_fields_by_label() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_label = str() + >>> op.inputs.label.connect(my_label) + >>> my_merged_field_support = dpf.AbstractFieldSupport() + >>> op.inputs.merged_field_support.connect(my_merged_field_support) + >>> my_sumMerge = bool() + >>> op.inputs.sumMerge.connect(my_sumMerge) + """ + + def __init__(self, op: Operator): + super().__init__(merge_fields_by_label._spec().inputs, op) + self._fields_container = Input( + merge_fields_by_label._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._fields_container) + self._label = Input(merge_fields_by_label._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._label) + self._merged_field_support = Input( + merge_fields_by_label._spec().input_pin(2), 2, op, -1 + ) + self._inputs.append(self._merged_field_support) + self._sumMerge = Input(merge_fields_by_label._spec().input_pin(3), 3, op, -1) + self._inputs.append(self._sumMerge) + + @property + def fields_container(self): + """Allows to connect fields_container input to the operator. + + Parameters + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_fields_by_label() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> # or + >>> op.inputs.fields_container(my_fields_container) + """ + return self._fields_container + + @property + def label(self): + """Allows to connect label input to the operator. + + Label identifier that should be merged. 
+ + Parameters + ---------- + my_label : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_fields_by_label() + >>> op.inputs.label.connect(my_label) + >>> # or + >>> op.inputs.label(my_label) + """ + return self._label + + @property + def merged_field_support(self): + """Allows to connect merged_field_support input to the operator. + + The fieldscontainer's support that has + already been merged. + + Parameters + ---------- + my_merged_field_support : AbstractFieldSupport + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_fields_by_label() + >>> op.inputs.merged_field_support.connect(my_merged_field_support) + >>> # or + >>> op.inputs.merged_field_support(my_merged_field_support) + """ + return self._merged_field_support + + @property + def sumMerge(self): + """Allows to connect sumMerge input to the operator. + + Default is false. if true redundant + quantities are summed instead of + being ignored. + + Parameters + ---------- + my_sumMerge : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_fields_by_label() + >>> op.inputs.sumMerge.connect(my_sumMerge) + >>> # or + >>> op.inputs.sumMerge(my_sumMerge) + """ + return self._sumMerge + + +class OutputsMergeFieldsByLabel(_Outputs): + """Intermediate class used to get outputs from + merge_fields_by_label operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_fields_by_label() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_fields_container = op.outputs.fields_container() + >>> result_merged_field_support = op.outputs.merged_field_support() + """ + + def __init__(self, op: Operator): + super().__init__(merge_fields_by_label._spec().outputs, op) + self._fields_container = Output( + merge_fields_by_label._spec().output_pin(0), 0, op + ) + self._outputs.append(self._fields_container) + self._merged_field_support = Output( + merge_fields_by_label._spec().output_pin(1), 1, op + ) + self._outputs.append(self._merged_field_support) + + @property + def fields_container(self): + """Allows to get fields_container output of the operator + + Returns + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_fields_by_label() + >>> # Connect inputs : op.inputs. ... + >>> result_fields_container = op.outputs.fields_container() + """ # noqa: E501 + return self._fields_container + + @property + def merged_field_support(self): + """Allows to get merged_field_support output of the operator + + Returns + ---------- + my_merged_field_support : AbstractFieldSupport + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_fields_by_label() + >>> # Connect inputs : op.inputs. ... + >>> result_merged_field_support = op.outputs.merged_field_support() + """ # noqa: E501 + return self._merged_field_support diff --git a/ansys/dpf/core/operators/utility/merge_fields_containers.py b/ansys/dpf/core/operators/utility/merge_fields_containers.py new file mode 100644 index 00000000000..df54807f471 --- /dev/null +++ b/ansys/dpf/core/operators/utility/merge_fields_containers.py @@ -0,0 +1,322 @@ +""" +merge_fields_containers +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class merge_fields_containers(Operator): + """Take a set of fields containers and assemble them in a unique one + + Parameters + ---------- + merged_fields_support : AbstractFieldSupport, optional + Already merged field support. + merged_fields_containers_support : AbstractFieldSupport, optional + Already merged fields containers support. + fields_containers1 : FieldsContainer + A vector of fields containers to merge or + fields containers from pin 0 to ... + fields_containers2 : FieldsContainer + A vector of fields containers to merge or + fields containers from pin 0 to ... + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.merge_fields_containers() + + >>> # Make input connections + >>> my_merged_fields_support = dpf.AbstractFieldSupport() + >>> op.inputs.merged_fields_support.connect(my_merged_fields_support) + >>> my_merged_fields_containers_support = dpf.AbstractFieldSupport() + >>> op.inputs.merged_fields_containers_support.connect(my_merged_fields_containers_support) + >>> my_fields_containers1 = dpf.FieldsContainer() + >>> op.inputs.fields_containers1.connect(my_fields_containers1) + >>> my_fields_containers2 = dpf.FieldsContainer() + >>> op.inputs.fields_containers2.connect(my_fields_containers2) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.merge_fields_containers( + ... merged_fields_support=my_merged_fields_support, + ... merged_fields_containers_support=my_merged_fields_containers_support, + ... fields_containers1=my_fields_containers1, + ... fields_containers2=my_fields_containers2, + ... 
) + + >>> # Get output data + >>> result_merged_fields_container = op.outputs.merged_fields_container() + """ + + def __init__( + self, + merged_fields_support=None, + merged_fields_containers_support=None, + fields_containers1=None, + fields_containers2=None, + config=None, + server=None, + ): + super().__init__(name="merge::fields_container", config=config, server=server) + self._inputs = InputsMergeFieldsContainers(self) + self._outputs = OutputsMergeFieldsContainers(self) + if merged_fields_support is not None: + self.inputs.merged_fields_support.connect(merged_fields_support) + if merged_fields_containers_support is not None: + self.inputs.merged_fields_containers_support.connect( + merged_fields_containers_support + ) + if fields_containers1 is not None: + self.inputs.fields_containers1.connect(fields_containers1) + if fields_containers2 is not None: + self.inputs.fields_containers2.connect(fields_containers2) + + @staticmethod + def _spec(): + description = ( + """Take a set of fields containers and assemble them in a unique one""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + -2: PinSpecification( + name="merged_fields_support", + type_names=["abstract_field_support"], + optional=True, + document="""Already merged field support.""", + ), + -1: PinSpecification( + name="merged_fields_containers_support", + type_names=[ + "abstract_field_support", + "umap>", + ], + optional=True, + document="""Already merged fields containers support.""", + ), + 0: PinSpecification( + name="fields_containers", + type_names=["fields_container"], + optional=False, + document="""A vector of fields containers to merge or + fields containers from pin 0 to ...""", + ), + 1: PinSpecification( + name="fields_containers", + type_names=["fields_container"], + optional=False, + document="""A vector of fields containers to merge or + fields containers from pin 0 to ...""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + 
name="merged_fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="merge::fields_container", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsMergeFieldsContainers + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsMergeFieldsContainers + """ + return super().outputs + + +class InputsMergeFieldsContainers(_Inputs): + """Intermediate class used to connect user inputs to + merge_fields_containers operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_fields_containers() + >>> my_merged_fields_support = dpf.AbstractFieldSupport() + >>> op.inputs.merged_fields_support.connect(my_merged_fields_support) + >>> my_merged_fields_containers_support = dpf.AbstractFieldSupport() + >>> op.inputs.merged_fields_containers_support.connect(my_merged_fields_containers_support) + >>> my_fields_containers1 = dpf.FieldsContainer() + >>> op.inputs.fields_containers1.connect(my_fields_containers1) + >>> my_fields_containers2 = dpf.FieldsContainer() + >>> op.inputs.fields_containers2.connect(my_fields_containers2) + """ + + def __init__(self, op: Operator): + super().__init__(merge_fields_containers._spec().inputs, op) + self._merged_fields_support = Input( + merge_fields_containers._spec().input_pin(-2), -2, op, -1 + ) + self._inputs.append(self._merged_fields_support) + self._merged_fields_containers_support = Input( + merge_fields_containers._spec().input_pin(-1), -1, op, -1 + ) + self._inputs.append(self._merged_fields_containers_support) + self._fields_containers1 = Input( + merge_fields_containers._spec().input_pin(0), 0, op, 0 + ) + self._inputs.append(self._fields_containers1) + self._fields_containers2 = Input( + merge_fields_containers._spec().input_pin(1), 1, op, 1 + ) + self._inputs.append(self._fields_containers2) + + @property + def merged_fields_support(self): + """Allows to connect merged_fields_support input to the operator. + + Already merged field support. 
+ + Parameters + ---------- + my_merged_fields_support : AbstractFieldSupport + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_fields_containers() + >>> op.inputs.merged_fields_support.connect(my_merged_fields_support) + >>> # or + >>> op.inputs.merged_fields_support(my_merged_fields_support) + """ + return self._merged_fields_support + + @property + def merged_fields_containers_support(self): + """Allows to connect merged_fields_containers_support input to the operator. + + Already merged fields containers support. + + Parameters + ---------- + my_merged_fields_containers_support : AbstractFieldSupport + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_fields_containers() + >>> op.inputs.merged_fields_containers_support.connect(my_merged_fields_containers_support) + >>> # or + >>> op.inputs.merged_fields_containers_support(my_merged_fields_containers_support) + """ + return self._merged_fields_containers_support + + @property + def fields_containers1(self): + """Allows to connect fields_containers1 input to the operator. + + A vector of fields containers to merge or + fields containers from pin 0 to ... + + Parameters + ---------- + my_fields_containers1 : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_fields_containers() + >>> op.inputs.fields_containers1.connect(my_fields_containers1) + >>> # or + >>> op.inputs.fields_containers1(my_fields_containers1) + """ + return self._fields_containers1 + + @property + def fields_containers2(self): + """Allows to connect fields_containers2 input to the operator. + + A vector of fields containers to merge or + fields containers from pin 0 to ... 
+ + Parameters + ---------- + my_fields_containers2 : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_fields_containers() + >>> op.inputs.fields_containers2.connect(my_fields_containers2) + >>> # or + >>> op.inputs.fields_containers2(my_fields_containers2) + """ + return self._fields_containers2 + + +class OutputsMergeFieldsContainers(_Outputs): + """Intermediate class used to get outputs from + merge_fields_containers operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_fields_containers() + >>> # Connect inputs : op.inputs. ... + >>> result_merged_fields_container = op.outputs.merged_fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(merge_fields_containers._spec().outputs, op) + self._merged_fields_container = Output( + merge_fields_containers._spec().output_pin(0), 0, op + ) + self._outputs.append(self._merged_fields_container) + + @property + def merged_fields_container(self): + """Allows to get merged_fields_container output of the operator + + Returns + ---------- + my_merged_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_fields_containers() + >>> # Connect inputs : op.inputs. ... + >>> result_merged_fields_container = op.outputs.merged_fields_container() + """ # noqa: E501 + return self._merged_fields_container diff --git a/ansys/dpf/core/operators/utility/merge_materials.py b/ansys/dpf/core/operators/utility/merge_materials.py new file mode 100644 index 00000000000..c127963a005 --- /dev/null +++ b/ansys/dpf/core/operators/utility/merge_materials.py @@ -0,0 +1,223 @@ +""" +merge_materials +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class merge_materials(Operator): + """Take a set of materials and assemble them in a unique one + + Parameters + ---------- + materials1 : Materials + A vector of materials to merge or materials + from pin 0 to ... + materials2 : Materials + A vector of materials to merge or materials + from pin 0 to ... + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.merge_materials() + + >>> # Make input connections + >>> my_materials1 = dpf.Materials() + >>> op.inputs.materials1.connect(my_materials1) + >>> my_materials2 = dpf.Materials() + >>> op.inputs.materials2.connect(my_materials2) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.merge_materials( + ... materials1=my_materials1, + ... materials2=my_materials2, + ... 
) + + >>> # Get output data + >>> result_merged_materials = op.outputs.merged_materials() + """ + + def __init__(self, materials1=None, materials2=None, config=None, server=None): + super().__init__(name="merge::materials", config=config, server=server) + self._inputs = InputsMergeMaterials(self) + self._outputs = OutputsMergeMaterials(self) + if materials1 is not None: + self.inputs.materials1.connect(materials1) + if materials2 is not None: + self.inputs.materials2.connect(materials2) + + @staticmethod + def _spec(): + description = """Take a set of materials and assemble them in a unique one""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="materials", + type_names=["materials"], + optional=False, + document="""A vector of materials to merge or materials + from pin 0 to ...""", + ), + 1: PinSpecification( + name="materials", + type_names=["materials"], + optional=False, + document="""A vector of materials to merge or materials + from pin 0 to ...""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="merged_materials", + type_names=["materials"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="merge::materials", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsMergeMaterials + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsMergeMaterials + """ + return super().outputs + + +class InputsMergeMaterials(_Inputs): + """Intermediate class used to connect user inputs to + merge_materials operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_materials() + >>> my_materials1 = dpf.Materials() + >>> op.inputs.materials1.connect(my_materials1) + >>> my_materials2 = dpf.Materials() + >>> op.inputs.materials2.connect(my_materials2) + """ + + def __init__(self, op: Operator): + super().__init__(merge_materials._spec().inputs, op) + self._materials1 = Input(merge_materials._spec().input_pin(0), 0, op, 0) + self._inputs.append(self._materials1) + self._materials2 = Input(merge_materials._spec().input_pin(1), 1, op, 1) + self._inputs.append(self._materials2) + + @property + def materials1(self): + """Allows to connect materials1 input to the operator. + + A vector of materials to merge or materials + from pin 0 to ... + + Parameters + ---------- + my_materials1 : Materials + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_materials() + >>> op.inputs.materials1.connect(my_materials1) + >>> # or + >>> op.inputs.materials1(my_materials1) + """ + return self._materials1 + + @property + def materials2(self): + """Allows to connect materials2 input to the operator. + + A vector of materials to merge or materials + from pin 0 to ... 
+ + Parameters + ---------- + my_materials2 : Materials + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_materials() + >>> op.inputs.materials2.connect(my_materials2) + >>> # or + >>> op.inputs.materials2(my_materials2) + """ + return self._materials2 + + +class OutputsMergeMaterials(_Outputs): + """Intermediate class used to get outputs from + merge_materials operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_materials() + >>> # Connect inputs : op.inputs. ... + >>> result_merged_materials = op.outputs.merged_materials() + """ + + def __init__(self, op: Operator): + super().__init__(merge_materials._spec().outputs, op) + self._merged_materials = Output(merge_materials._spec().output_pin(0), 0, op) + self._outputs.append(self._merged_materials) + + @property + def merged_materials(self): + """Allows to get merged_materials output of the operator + + Returns + ---------- + my_merged_materials : Materials + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_materials() + >>> # Connect inputs : op.inputs. ... + >>> result_merged_materials = op.outputs.merged_materials() + """ # noqa: E501 + return self._merged_materials diff --git a/ansys/dpf/core/operators/utility/merge_meshes.py b/ansys/dpf/core/operators/utility/merge_meshes.py new file mode 100644 index 00000000000..b1a6e85fffd --- /dev/null +++ b/ansys/dpf/core/operators/utility/merge_meshes.py @@ -0,0 +1,223 @@ +""" +merge_meshes +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class merge_meshes(Operator): + """Take a set of mesh and assemble them in a unique one + + Parameters + ---------- + meshes1 : MeshedRegion + A vector of meshed region to merge or meshed + region from pin 0 to ... + meshes2 : MeshedRegion + A vector of meshed region to merge or meshed + region from pin 0 to ... + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.merge_meshes() + + >>> # Make input connections + >>> my_meshes1 = dpf.MeshedRegion() + >>> op.inputs.meshes1.connect(my_meshes1) + >>> my_meshes2 = dpf.MeshedRegion() + >>> op.inputs.meshes2.connect(my_meshes2) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.merge_meshes( + ... meshes1=my_meshes1, + ... meshes2=my_meshes2, + ... 
) + + >>> # Get output data + >>> result_merges_mesh = op.outputs.merges_mesh() + """ + + def __init__(self, meshes1=None, meshes2=None, config=None, server=None): + super().__init__(name="merge::mesh", config=config, server=server) + self._inputs = InputsMergeMeshes(self) + self._outputs = OutputsMergeMeshes(self) + if meshes1 is not None: + self.inputs.meshes1.connect(meshes1) + if meshes2 is not None: + self.inputs.meshes2.connect(meshes2) + + @staticmethod + def _spec(): + description = """Take a set of mesh and assemble them in a unique one""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="meshes", + type_names=["abstract_meshed_region"], + optional=False, + document="""A vector of meshed region to merge or meshed + region from pin 0 to ...""", + ), + 1: PinSpecification( + name="meshes", + type_names=["abstract_meshed_region"], + optional=False, + document="""A vector of meshed region to merge or meshed + region from pin 0 to ...""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="merges_mesh", + type_names=["abstract_meshed_region"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="merge::mesh", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsMergeMeshes + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsMergeMeshes + """ + return super().outputs + + +class InputsMergeMeshes(_Inputs): + """Intermediate class used to connect user inputs to + merge_meshes operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_meshes() + >>> my_meshes1 = dpf.MeshedRegion() + >>> op.inputs.meshes1.connect(my_meshes1) + >>> my_meshes2 = dpf.MeshedRegion() + >>> op.inputs.meshes2.connect(my_meshes2) + """ + + def __init__(self, op: Operator): + super().__init__(merge_meshes._spec().inputs, op) + self._meshes1 = Input(merge_meshes._spec().input_pin(0), 0, op, 0) + self._inputs.append(self._meshes1) + self._meshes2 = Input(merge_meshes._spec().input_pin(1), 1, op, 1) + self._inputs.append(self._meshes2) + + @property + def meshes1(self): + """Allows to connect meshes1 input to the operator. + + A vector of meshed region to merge or meshed + region from pin 0 to ... + + Parameters + ---------- + my_meshes1 : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_meshes() + >>> op.inputs.meshes1.connect(my_meshes1) + >>> # or + >>> op.inputs.meshes1(my_meshes1) + """ + return self._meshes1 + + @property + def meshes2(self): + """Allows to connect meshes2 input to the operator. + + A vector of meshed region to merge or meshed + region from pin 0 to ... 
+ + Parameters + ---------- + my_meshes2 : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_meshes() + >>> op.inputs.meshes2.connect(my_meshes2) + >>> # or + >>> op.inputs.meshes2(my_meshes2) + """ + return self._meshes2 + + +class OutputsMergeMeshes(_Outputs): + """Intermediate class used to get outputs from + merge_meshes operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_meshes() + >>> # Connect inputs : op.inputs. ... + >>> result_merges_mesh = op.outputs.merges_mesh() + """ + + def __init__(self, op: Operator): + super().__init__(merge_meshes._spec().outputs, op) + self._merges_mesh = Output(merge_meshes._spec().output_pin(0), 0, op) + self._outputs.append(self._merges_mesh) + + @property + def merges_mesh(self): + """Allows to get merges_mesh output of the operator + + Returns + ---------- + my_merges_mesh : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_meshes() + >>> # Connect inputs : op.inputs. ... + >>> result_merges_mesh = op.outputs.merges_mesh() + """ # noqa: E501 + return self._merges_mesh diff --git a/ansys/dpf/core/operators/utility/merge_meshes_containers.py b/ansys/dpf/core/operators/utility/merge_meshes_containers.py new file mode 100644 index 00000000000..93742195791 --- /dev/null +++ b/ansys/dpf/core/operators/utility/merge_meshes_containers.py @@ -0,0 +1,233 @@ +""" +merge_meshes_containers +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class merge_meshes_containers(Operator): + """Take a set of meshes containers and assemble them in a unique one + + Parameters + ---------- + meshes_containers1 : MeshesContainer + A vector of meshes containers to merge or + meshes containers from pin 0 to ... + meshes_containers2 : MeshesContainer + A vector of meshes containers to merge or + meshes containers from pin 0 to ... + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.merge_meshes_containers() + + >>> # Make input connections + >>> my_meshes_containers1 = dpf.MeshesContainer() + >>> op.inputs.meshes_containers1.connect(my_meshes_containers1) + >>> my_meshes_containers2 = dpf.MeshesContainer() + >>> op.inputs.meshes_containers2.connect(my_meshes_containers2) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.merge_meshes_containers( + ... meshes_containers1=my_meshes_containers1, + ... meshes_containers2=my_meshes_containers2, + ... 
) + + >>> # Get output data + >>> result_merged_meshes_container = op.outputs.merged_meshes_container() + """ + + def __init__( + self, meshes_containers1=None, meshes_containers2=None, config=None, server=None + ): + super().__init__(name="merge::meshes_container", config=config, server=server) + self._inputs = InputsMergeMeshesContainers(self) + self._outputs = OutputsMergeMeshesContainers(self) + if meshes_containers1 is not None: + self.inputs.meshes_containers1.connect(meshes_containers1) + if meshes_containers2 is not None: + self.inputs.meshes_containers2.connect(meshes_containers2) + + @staticmethod + def _spec(): + description = ( + """Take a set of meshes containers and assemble them in a unique one""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="meshes_containers", + type_names=["meshes_container"], + optional=False, + document="""A vector of meshes containers to merge or + meshes containers from pin 0 to ...""", + ), + 1: PinSpecification( + name="meshes_containers", + type_names=["meshes_container"], + optional=False, + document="""A vector of meshes containers to merge or + meshes containers from pin 0 to ...""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="merged_meshes_container", + type_names=["meshes_container"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="merge::meshes_container", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsMergeMeshesContainers + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsMergeMeshesContainers + """ + return super().outputs + + +class InputsMergeMeshesContainers(_Inputs): + """Intermediate class used to connect user inputs to + merge_meshes_containers operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_meshes_containers() + >>> my_meshes_containers1 = dpf.MeshesContainer() + >>> op.inputs.meshes_containers1.connect(my_meshes_containers1) + >>> my_meshes_containers2 = dpf.MeshesContainer() + >>> op.inputs.meshes_containers2.connect(my_meshes_containers2) + """ + + def __init__(self, op: Operator): + super().__init__(merge_meshes_containers._spec().inputs, op) + self._meshes_containers1 = Input( + merge_meshes_containers._spec().input_pin(0), 0, op, 0 + ) + self._inputs.append(self._meshes_containers1) + self._meshes_containers2 = Input( + merge_meshes_containers._spec().input_pin(1), 1, op, 1 + ) + self._inputs.append(self._meshes_containers2) + + @property + def meshes_containers1(self): + """Allows to connect meshes_containers1 input to the operator. + + A vector of meshes containers to merge or + meshes containers from pin 0 to ... 
+ + Parameters + ---------- + my_meshes_containers1 : MeshesContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_meshes_containers() + >>> op.inputs.meshes_containers1.connect(my_meshes_containers1) + >>> # or + >>> op.inputs.meshes_containers1(my_meshes_containers1) + """ + return self._meshes_containers1 + + @property + def meshes_containers2(self): + """Allows to connect meshes_containers2 input to the operator. + + A vector of meshes containers to merge or + meshes containers from pin 0 to ... + + Parameters + ---------- + my_meshes_containers2 : MeshesContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_meshes_containers() + >>> op.inputs.meshes_containers2.connect(my_meshes_containers2) + >>> # or + >>> op.inputs.meshes_containers2(my_meshes_containers2) + """ + return self._meshes_containers2 + + +class OutputsMergeMeshesContainers(_Outputs): + """Intermediate class used to get outputs from + merge_meshes_containers operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_meshes_containers() + >>> # Connect inputs : op.inputs. ... + >>> result_merged_meshes_container = op.outputs.merged_meshes_container() + """ + + def __init__(self, op: Operator): + super().__init__(merge_meshes_containers._spec().outputs, op) + self._merged_meshes_container = Output( + merge_meshes_containers._spec().output_pin(0), 0, op + ) + self._outputs.append(self._merged_meshes_container) + + @property + def merged_meshes_container(self): + """Allows to get merged_meshes_container output of the operator + + Returns + ---------- + my_merged_meshes_container : MeshesContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_meshes_containers() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_merged_meshes_container = op.outputs.merged_meshes_container() + """ # noqa: E501 + return self._merged_meshes_container diff --git a/ansys/dpf/core/operators/utility/merge_property_fields.py b/ansys/dpf/core/operators/utility/merge_property_fields.py new file mode 100644 index 00000000000..490cbd6c512 --- /dev/null +++ b/ansys/dpf/core/operators/utility/merge_property_fields.py @@ -0,0 +1,241 @@ +""" +merge_property_fields +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class merge_property_fields(Operator): + """Take a set of property fields and assemble them in a unique one + + Parameters + ---------- + vector_shared_ptr_property_field__1 : PropertyField + A vector of property fields to merge or + property fields from pin 0 to ... + vector_shared_ptr_property_field__2 : PropertyField + A vector of property fields to merge or + property fields from pin 0 to ... + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.merge_property_fields() + + >>> # Make input connections + >>> my_vector_shared_ptr_property_field__1 = dpf.PropertyField() + >>> op.inputs.vector_shared_ptr_property_field__1.connect(my_vector_shared_ptr_property_field__1) + >>> my_vector_shared_ptr_property_field__2 = dpf.PropertyField() + >>> op.inputs.vector_shared_ptr_property_field__2.connect(my_vector_shared_ptr_property_field__2) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.merge_property_fields( + ... vector_shared_ptr_property_field__1=my_vector_shared_ptr_property_field__1, + ... vector_shared_ptr_property_field__2=my_vector_shared_ptr_property_field__2, + ... 
) + + >>> # Get output data + >>> result_property_field = op.outputs.property_field() + """ + + def __init__( + self, + vector_shared_ptr_property_field__1=None, + vector_shared_ptr_property_field__2=None, + config=None, + server=None, + ): + super().__init__(name="merge::property_field", config=config, server=server) + self._inputs = InputsMergePropertyFields(self) + self._outputs = OutputsMergePropertyFields(self) + if vector_shared_ptr_property_field__1 is not None: + self.inputs.vector_shared_ptr_property_field__1.connect( + vector_shared_ptr_property_field__1 + ) + if vector_shared_ptr_property_field__2 is not None: + self.inputs.vector_shared_ptr_property_field__2.connect( + vector_shared_ptr_property_field__2 + ) + + @staticmethod + def _spec(): + description = ( + """Take a set of property fields and assemble them in a unique one""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="vector_shared_ptr_property_field__", + type_names=["property_field"], + optional=False, + document="""A vector of property fields to merge or + property fields from pin 0 to ...""", + ), + 1: PinSpecification( + name="vector_shared_ptr_property_field__", + type_names=["property_field"], + optional=False, + document="""A vector of property fields to merge or + property fields from pin 0 to ...""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="property_field", + type_names=["property_field"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. 
When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="merge::property_field", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsMergePropertyFields + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsMergePropertyFields + """ + return super().outputs + + +class InputsMergePropertyFields(_Inputs): + """Intermediate class used to connect user inputs to + merge_property_fields operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_property_fields() + >>> my_vector_shared_ptr_property_field__1 = dpf.PropertyField() + >>> op.inputs.vector_shared_ptr_property_field__1.connect(my_vector_shared_ptr_property_field__1) + >>> my_vector_shared_ptr_property_field__2 = dpf.PropertyField() + >>> op.inputs.vector_shared_ptr_property_field__2.connect(my_vector_shared_ptr_property_field__2) + """ + + def __init__(self, op: Operator): + super().__init__(merge_property_fields._spec().inputs, op) + self._vector_shared_ptr_property_field__1 = Input( + merge_property_fields._spec().input_pin(0), 0, op, 0 + ) + self._inputs.append(self._vector_shared_ptr_property_field__1) + self._vector_shared_ptr_property_field__2 = Input( + merge_property_fields._spec().input_pin(1), 1, op, 1 + ) + self._inputs.append(self._vector_shared_ptr_property_field__2) + + @property + def vector_shared_ptr_property_field__1(self): + """Allows to connect vector_shared_ptr_property_field__1 input to the operator. + + A vector of property fields to merge or + property fields from pin 0 to ... 
+ + Parameters + ---------- + my_vector_shared_ptr_property_field__1 : PropertyField + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_property_fields() + >>> op.inputs.vector_shared_ptr_property_field__1.connect(my_vector_shared_ptr_property_field__1) + >>> # or + >>> op.inputs.vector_shared_ptr_property_field__1(my_vector_shared_ptr_property_field__1) + """ + return self._vector_shared_ptr_property_field__1 + + @property + def vector_shared_ptr_property_field__2(self): + """Allows to connect vector_shared_ptr_property_field__2 input to the operator. + + A vector of property fields to merge or + property fields from pin 0 to ... + + Parameters + ---------- + my_vector_shared_ptr_property_field__2 : PropertyField + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_property_fields() + >>> op.inputs.vector_shared_ptr_property_field__2.connect(my_vector_shared_ptr_property_field__2) + >>> # or + >>> op.inputs.vector_shared_ptr_property_field__2(my_vector_shared_ptr_property_field__2) + """ + return self._vector_shared_ptr_property_field__2 + + +class OutputsMergePropertyFields(_Outputs): + """Intermediate class used to get outputs from + merge_property_fields operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_property_fields() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_property_field = op.outputs.property_field() + """ + + def __init__(self, op: Operator): + super().__init__(merge_property_fields._spec().outputs, op) + self._property_field = Output( + merge_property_fields._spec().output_pin(0), 0, op + ) + self._outputs.append(self._property_field) + + @property + def property_field(self): + """Allows to get property_field output of the operator + + Returns + ---------- + my_property_field : PropertyField + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_property_fields() + >>> # Connect inputs : op.inputs. ... + >>> result_property_field = op.outputs.property_field() + """ # noqa: E501 + return self._property_field diff --git a/ansys/dpf/core/operators/utility/merge_result_infos.py b/ansys/dpf/core/operators/utility/merge_result_infos.py new file mode 100644 index 00000000000..ab4ef08c058 --- /dev/null +++ b/ansys/dpf/core/operators/utility/merge_result_infos.py @@ -0,0 +1,227 @@ +""" +merge_result_infos +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class merge_result_infos(Operator): + """Take a set of result info and assemble them in a unique one + + Parameters + ---------- + result_infos1 : ResultInfo + A vector of result info containers to merge + or result infos from pin 0 to ... + result_infos2 : ResultInfo + A vector of result info containers to merge + or result infos from pin 0 to ... 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.merge_result_infos() + + >>> # Make input connections + >>> my_result_infos1 = dpf.ResultInfo() + >>> op.inputs.result_infos1.connect(my_result_infos1) + >>> my_result_infos2 = dpf.ResultInfo() + >>> op.inputs.result_infos2.connect(my_result_infos2) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.merge_result_infos( + ... result_infos1=my_result_infos1, + ... result_infos2=my_result_infos2, + ... ) + + >>> # Get output data + >>> result_merged_result_infos = op.outputs.merged_result_infos() + """ + + def __init__( + self, result_infos1=None, result_infos2=None, config=None, server=None + ): + super().__init__(name="merge::result_info", config=config, server=server) + self._inputs = InputsMergeResultInfos(self) + self._outputs = OutputsMergeResultInfos(self) + if result_infos1 is not None: + self.inputs.result_infos1.connect(result_infos1) + if result_infos2 is not None: + self.inputs.result_infos2.connect(result_infos2) + + @staticmethod + def _spec(): + description = """Take a set of result info and assemble them in a unique one""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="result_infos", + type_names=["result_info"], + optional=False, + document="""A vector of result info containers to merge + or result infos from pin 0 to ...""", + ), + 1: PinSpecification( + name="result_infos", + type_names=["result_info"], + optional=False, + document="""A vector of result info containers to merge + or result infos from pin 0 to ...""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="merged_result_infos", + type_names=["result_info"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. 
+ + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="merge::result_info", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsMergeResultInfos + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsMergeResultInfos + """ + return super().outputs + + +class InputsMergeResultInfos(_Inputs): + """Intermediate class used to connect user inputs to + merge_result_infos operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_result_infos() + >>> my_result_infos1 = dpf.ResultInfo() + >>> op.inputs.result_infos1.connect(my_result_infos1) + >>> my_result_infos2 = dpf.ResultInfo() + >>> op.inputs.result_infos2.connect(my_result_infos2) + """ + + def __init__(self, op: Operator): + super().__init__(merge_result_infos._spec().inputs, op) + self._result_infos1 = Input(merge_result_infos._spec().input_pin(0), 0, op, 0) + self._inputs.append(self._result_infos1) + self._result_infos2 = Input(merge_result_infos._spec().input_pin(1), 1, op, 1) + self._inputs.append(self._result_infos2) + + @property + def result_infos1(self): + """Allows to connect result_infos1 input to the operator. + + A vector of result info containers to merge + or result infos from pin 0 to ... 
+ + Parameters + ---------- + my_result_infos1 : ResultInfo + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_result_infos() + >>> op.inputs.result_infos1.connect(my_result_infos1) + >>> # or + >>> op.inputs.result_infos1(my_result_infos1) + """ + return self._result_infos1 + + @property + def result_infos2(self): + """Allows to connect result_infos2 input to the operator. + + A vector of result info containers to merge + or result infos from pin 0 to ... + + Parameters + ---------- + my_result_infos2 : ResultInfo + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_result_infos() + >>> op.inputs.result_infos2.connect(my_result_infos2) + >>> # or + >>> op.inputs.result_infos2(my_result_infos2) + """ + return self._result_infos2 + + +class OutputsMergeResultInfos(_Outputs): + """Intermediate class used to get outputs from + merge_result_infos operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_result_infos() + >>> # Connect inputs : op.inputs. ... + >>> result_merged_result_infos = op.outputs.merged_result_infos() + """ + + def __init__(self, op: Operator): + super().__init__(merge_result_infos._spec().outputs, op) + self._merged_result_infos = Output( + merge_result_infos._spec().output_pin(0), 0, op + ) + self._outputs.append(self._merged_result_infos) + + @property + def merged_result_infos(self): + """Allows to get merged_result_infos output of the operator + + Returns + ---------- + my_merged_result_infos : ResultInfo + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_result_infos() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_merged_result_infos = op.outputs.merged_result_infos() + """ # noqa: E501 + return self._merged_result_infos diff --git a/ansys/dpf/core/operators/utility/merge_scopings.py b/ansys/dpf/core/operators/utility/merge_scopings.py new file mode 100644 index 00000000000..ddaf60ee4c0 --- /dev/null +++ b/ansys/dpf/core/operators/utility/merge_scopings.py @@ -0,0 +1,183 @@ +""" +merge_scopings +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class merge_scopings(Operator): + """Take a set of scoping and assemble them in a unique one + + Parameters + ---------- + scopings : Scoping + A vector of result info containers to merge + or scopings from pin 0 to ... + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.merge_scopings() + + >>> # Make input connections + >>> my_scopings = dpf.Scoping() + >>> op.inputs.scopings.connect(my_scopings) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.merge_scopings( + ... scopings=my_scopings, + ... 
) + + >>> # Get output data + >>> result_merged_scoping = op.outputs.merged_scoping() + """ + + def __init__(self, scopings=None, config=None, server=None): + super().__init__(name="merge::scoping", config=config, server=server) + self._inputs = InputsMergeScopings(self) + self._outputs = OutputsMergeScopings(self) + if scopings is not None: + self.inputs.scopings.connect(scopings) + + @staticmethod + def _spec(): + description = """Take a set of scoping and assemble them in a unique one""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="scopings", + type_names=["vector>", "scoping"], + optional=False, + document="""A vector of result info containers to merge + or scopings from pin 0 to ...""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="merged_scoping", + type_names=["scoping"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="merge::scoping", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsMergeScopings + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsMergeScopings + """ + return super().outputs + + +class InputsMergeScopings(_Inputs): + """Intermediate class used to connect user inputs to + merge_scopings operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_scopings() + >>> my_scopings = dpf.Scoping() + >>> op.inputs.scopings.connect(my_scopings) + """ + + def __init__(self, op: Operator): + super().__init__(merge_scopings._spec().inputs, op) + self._scopings = Input(merge_scopings._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._scopings) + + @property + def scopings(self): + """Allows to connect scopings input to the operator. + + A vector of result info containers to merge + or scopings from pin 0 to ... + + Parameters + ---------- + my_scopings : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_scopings() + >>> op.inputs.scopings.connect(my_scopings) + >>> # or + >>> op.inputs.scopings(my_scopings) + """ + return self._scopings + + +class OutputsMergeScopings(_Outputs): + """Intermediate class used to get outputs from + merge_scopings operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_scopings() + >>> # Connect inputs : op.inputs. ... + >>> result_merged_scoping = op.outputs.merged_scoping() + """ + + def __init__(self, op: Operator): + super().__init__(merge_scopings._spec().outputs, op) + self._merged_scoping = Output(merge_scopings._spec().output_pin(0), 0, op) + self._outputs.append(self._merged_scoping) + + @property + def merged_scoping(self): + """Allows to get merged_scoping output of the operator + + Returns + ---------- + my_merged_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_scopings() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_merged_scoping = op.outputs.merged_scoping() + """ # noqa: E501 + return self._merged_scoping diff --git a/ansys/dpf/core/operators/utility/merge_scopings_containers.py b/ansys/dpf/core/operators/utility/merge_scopings_containers.py new file mode 100644 index 00000000000..b1da62c3d37 --- /dev/null +++ b/ansys/dpf/core/operators/utility/merge_scopings_containers.py @@ -0,0 +1,237 @@ +""" +merge_scopings_containers +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class merge_scopings_containers(Operator): + """Take a set of scopings containers and assemble them in a unique one + + Parameters + ---------- + scopings_containers1 : ScopingsContainer + A vector of scopings containers to merge or + scopings containers from pin 0 to ... + scopings_containers2 : ScopingsContainer + A vector of scopings containers to merge or + scopings containers from pin 0 to ... + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.merge_scopings_containers() + + >>> # Make input connections + >>> my_scopings_containers1 = dpf.ScopingsContainer() + >>> op.inputs.scopings_containers1.connect(my_scopings_containers1) + >>> my_scopings_containers2 = dpf.ScopingsContainer() + >>> op.inputs.scopings_containers2.connect(my_scopings_containers2) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.merge_scopings_containers( + ... scopings_containers1=my_scopings_containers1, + ... scopings_containers2=my_scopings_containers2, + ... 
) + + >>> # Get output data + >>> result_merged_scopings_container = op.outputs.merged_scopings_container() + """ + + def __init__( + self, + scopings_containers1=None, + scopings_containers2=None, + config=None, + server=None, + ): + super().__init__(name="merge::scopings_container", config=config, server=server) + self._inputs = InputsMergeScopingsContainers(self) + self._outputs = OutputsMergeScopingsContainers(self) + if scopings_containers1 is not None: + self.inputs.scopings_containers1.connect(scopings_containers1) + if scopings_containers2 is not None: + self.inputs.scopings_containers2.connect(scopings_containers2) + + @staticmethod + def _spec(): + description = ( + """Take a set of scopings containers and assemble them in a unique one""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="scopings_containers", + type_names=["scopings_container"], + optional=False, + document="""A vector of scopings containers to merge or + scopings containers from pin 0 to ...""", + ), + 1: PinSpecification( + name="scopings_containers", + type_names=["scopings_container"], + optional=False, + document="""A vector of scopings containers to merge or + scopings containers from pin 0 to ...""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="merged_scopings_container", + type_names=["scopings_container"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="merge::scopings_container", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsMergeScopingsContainers + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsMergeScopingsContainers + """ + return super().outputs + + +class InputsMergeScopingsContainers(_Inputs): + """Intermediate class used to connect user inputs to + merge_scopings_containers operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_scopings_containers() + >>> my_scopings_containers1 = dpf.ScopingsContainer() + >>> op.inputs.scopings_containers1.connect(my_scopings_containers1) + >>> my_scopings_containers2 = dpf.ScopingsContainer() + >>> op.inputs.scopings_containers2.connect(my_scopings_containers2) + """ + + def __init__(self, op: Operator): + super().__init__(merge_scopings_containers._spec().inputs, op) + self._scopings_containers1 = Input( + merge_scopings_containers._spec().input_pin(0), 0, op, 0 + ) + self._inputs.append(self._scopings_containers1) + self._scopings_containers2 = Input( + merge_scopings_containers._spec().input_pin(1), 1, op, 1 + ) + self._inputs.append(self._scopings_containers2) + + @property + def scopings_containers1(self): + """Allows to connect scopings_containers1 input to the operator. + + A vector of scopings containers to merge or + scopings containers from pin 0 to ... 
+ + Parameters + ---------- + my_scopings_containers1 : ScopingsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_scopings_containers() + >>> op.inputs.scopings_containers1.connect(my_scopings_containers1) + >>> # or + >>> op.inputs.scopings_containers1(my_scopings_containers1) + """ + return self._scopings_containers1 + + @property + def scopings_containers2(self): + """Allows to connect scopings_containers2 input to the operator. + + A vector of scopings containers to merge or + scopings containers from pin 0 to ... + + Parameters + ---------- + my_scopings_containers2 : ScopingsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_scopings_containers() + >>> op.inputs.scopings_containers2.connect(my_scopings_containers2) + >>> # or + >>> op.inputs.scopings_containers2(my_scopings_containers2) + """ + return self._scopings_containers2 + + +class OutputsMergeScopingsContainers(_Outputs): + """Intermediate class used to get outputs from + merge_scopings_containers operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_scopings_containers() + >>> # Connect inputs : op.inputs. ... + >>> result_merged_scopings_container = op.outputs.merged_scopings_container() + """ + + def __init__(self, op: Operator): + super().__init__(merge_scopings_containers._spec().outputs, op) + self._merged_scopings_container = Output( + merge_scopings_containers._spec().output_pin(0), 0, op + ) + self._outputs.append(self._merged_scopings_container) + + @property + def merged_scopings_container(self): + """Allows to get merged_scopings_container output of the operator + + Returns + ---------- + my_merged_scopings_container : ScopingsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_scopings_containers() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_merged_scopings_container = op.outputs.merged_scopings_container() + """ # noqa: E501 + return self._merged_scopings_container diff --git a/ansys/dpf/core/operators/utility/merge_supports.py b/ansys/dpf/core/operators/utility/merge_supports.py new file mode 100644 index 00000000000..821c534c26f --- /dev/null +++ b/ansys/dpf/core/operators/utility/merge_supports.py @@ -0,0 +1,223 @@ +""" +merge_supports +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class merge_supports(Operator): + """Take a set of supports and assemble them in a unique one + + Parameters + ---------- + supports1 : AbstractFieldSupport + A vector of supports to merge or supports + from pin 0 to ... + supports2 : AbstractFieldSupport + A vector of supports to merge or supports + from pin 0 to ... + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.merge_supports() + + >>> # Make input connections + >>> my_supports1 = dpf.AbstractFieldSupport() + >>> op.inputs.supports1.connect(my_supports1) + >>> my_supports2 = dpf.AbstractFieldSupport() + >>> op.inputs.supports2.connect(my_supports2) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.merge_supports( + ... supports1=my_supports1, + ... supports2=my_supports2, + ... 
) + + >>> # Get output data + >>> result_merged_support = op.outputs.merged_support() + """ + + def __init__(self, supports1=None, supports2=None, config=None, server=None): + super().__init__(name="merge::abstract_support", config=config, server=server) + self._inputs = InputsMergeSupports(self) + self._outputs = OutputsMergeSupports(self) + if supports1 is not None: + self.inputs.supports1.connect(supports1) + if supports2 is not None: + self.inputs.supports2.connect(supports2) + + @staticmethod + def _spec(): + description = """Take a set of supports and assemble them in a unique one""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="supports", + type_names=["abstract_field_support"], + optional=False, + document="""A vector of supports to merge or supports + from pin 0 to ...""", + ), + 1: PinSpecification( + name="supports", + type_names=["abstract_field_support"], + optional=False, + document="""A vector of supports to merge or supports + from pin 0 to ...""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="merged_support", + type_names=["abstract_field_support"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="merge::abstract_support", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsMergeSupports + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsMergeSupports + """ + return super().outputs + + +class InputsMergeSupports(_Inputs): + """Intermediate class used to connect user inputs to + merge_supports operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_supports() + >>> my_supports1 = dpf.AbstractFieldSupport() + >>> op.inputs.supports1.connect(my_supports1) + >>> my_supports2 = dpf.AbstractFieldSupport() + >>> op.inputs.supports2.connect(my_supports2) + """ + + def __init__(self, op: Operator): + super().__init__(merge_supports._spec().inputs, op) + self._supports1 = Input(merge_supports._spec().input_pin(0), 0, op, 0) + self._inputs.append(self._supports1) + self._supports2 = Input(merge_supports._spec().input_pin(1), 1, op, 1) + self._inputs.append(self._supports2) + + @property + def supports1(self): + """Allows to connect supports1 input to the operator. + + A vector of supports to merge or supports + from pin 0 to ... + + Parameters + ---------- + my_supports1 : AbstractFieldSupport + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_supports() + >>> op.inputs.supports1.connect(my_supports1) + >>> # or + >>> op.inputs.supports1(my_supports1) + """ + return self._supports1 + + @property + def supports2(self): + """Allows to connect supports2 input to the operator. + + A vector of supports to merge or supports + from pin 0 to ... 
+ + Parameters + ---------- + my_supports2 : AbstractFieldSupport + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_supports() + >>> op.inputs.supports2.connect(my_supports2) + >>> # or + >>> op.inputs.supports2(my_supports2) + """ + return self._supports2 + + +class OutputsMergeSupports(_Outputs): + """Intermediate class used to get outputs from + merge_supports operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_supports() + >>> # Connect inputs : op.inputs. ... + >>> result_merged_support = op.outputs.merged_support() + """ + + def __init__(self, op: Operator): + super().__init__(merge_supports._spec().outputs, op) + self._merged_support = Output(merge_supports._spec().output_pin(0), 0, op) + self._outputs.append(self._merged_support) + + @property + def merged_support(self): + """Allows to get merged_support output of the operator + + Returns + ---------- + my_merged_support : AbstractFieldSupport + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_supports() + >>> # Connect inputs : op.inputs. ... + >>> result_merged_support = op.outputs.merged_support() + """ # noqa: E501 + return self._merged_support diff --git a/ansys/dpf/core/operators/utility/merge_time_freq_supports.py b/ansys/dpf/core/operators/utility/merge_time_freq_supports.py new file mode 100644 index 00000000000..1d221c5fdea --- /dev/null +++ b/ansys/dpf/core/operators/utility/merge_time_freq_supports.py @@ -0,0 +1,237 @@ +""" +merge_time_freq_supports +=============== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class merge_time_freq_supports(Operator): + """Take a set of time/freq support and assemble them in a unique one + + Parameters + ---------- + time_freq_supports1 : TimeFreqSupport + A vector of time/freq supports to merge or + time/freq supports from pin 0 to ... + time_freq_supports2 : TimeFreqSupport + A vector of time/freq supports to merge or + time/freq supports from pin 0 to ... + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.merge_time_freq_supports() + + >>> # Make input connections + >>> my_time_freq_supports1 = dpf.TimeFreqSupport() + >>> op.inputs.time_freq_supports1.connect(my_time_freq_supports1) + >>> my_time_freq_supports2 = dpf.TimeFreqSupport() + >>> op.inputs.time_freq_supports2.connect(my_time_freq_supports2) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.merge_time_freq_supports( + ... time_freq_supports1=my_time_freq_supports1, + ... time_freq_supports2=my_time_freq_supports2, + ... 
) + + >>> # Get output data + >>> result_merged_support = op.outputs.merged_support() + """ + + def __init__( + self, + time_freq_supports1=None, + time_freq_supports2=None, + config=None, + server=None, + ): + super().__init__(name="merge::time_freq_support", config=config, server=server) + self._inputs = InputsMergeTimeFreqSupports(self) + self._outputs = OutputsMergeTimeFreqSupports(self) + if time_freq_supports1 is not None: + self.inputs.time_freq_supports1.connect(time_freq_supports1) + if time_freq_supports2 is not None: + self.inputs.time_freq_supports2.connect(time_freq_supports2) + + @staticmethod + def _spec(): + description = ( + """Take a set of time/freq support and assemble them in a unique one""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_freq_supports", + type_names=["time_freq_support"], + optional=False, + document="""A vector of time/freq supports to merge or + time/freq supports from pin 0 to ...""", + ), + 1: PinSpecification( + name="time_freq_supports", + type_names=["time_freq_support"], + optional=False, + document="""A vector of time/freq supports to merge or + time/freq supports from pin 0 to ...""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="merged_support", + type_names=["time_freq_support"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="merge::time_freq_support", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsMergeTimeFreqSupports + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsMergeTimeFreqSupports + """ + return super().outputs + + +class InputsMergeTimeFreqSupports(_Inputs): + """Intermediate class used to connect user inputs to + merge_time_freq_supports operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_time_freq_supports() + >>> my_time_freq_supports1 = dpf.TimeFreqSupport() + >>> op.inputs.time_freq_supports1.connect(my_time_freq_supports1) + >>> my_time_freq_supports2 = dpf.TimeFreqSupport() + >>> op.inputs.time_freq_supports2.connect(my_time_freq_supports2) + """ + + def __init__(self, op: Operator): + super().__init__(merge_time_freq_supports._spec().inputs, op) + self._time_freq_supports1 = Input( + merge_time_freq_supports._spec().input_pin(0), 0, op, 0 + ) + self._inputs.append(self._time_freq_supports1) + self._time_freq_supports2 = Input( + merge_time_freq_supports._spec().input_pin(1), 1, op, 1 + ) + self._inputs.append(self._time_freq_supports2) + + @property + def time_freq_supports1(self): + """Allows to connect time_freq_supports1 input to the operator. + + A vector of time/freq supports to merge or + time/freq supports from pin 0 to ... 
+ + Parameters + ---------- + my_time_freq_supports1 : TimeFreqSupport + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_time_freq_supports() + >>> op.inputs.time_freq_supports1.connect(my_time_freq_supports1) + >>> # or + >>> op.inputs.time_freq_supports1(my_time_freq_supports1) + """ + return self._time_freq_supports1 + + @property + def time_freq_supports2(self): + """Allows to connect time_freq_supports2 input to the operator. + + A vector of time/freq supports to merge or + time/freq supports from pin 0 to ... + + Parameters + ---------- + my_time_freq_supports2 : TimeFreqSupport + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_time_freq_supports() + >>> op.inputs.time_freq_supports2.connect(my_time_freq_supports2) + >>> # or + >>> op.inputs.time_freq_supports2(my_time_freq_supports2) + """ + return self._time_freq_supports2 + + +class OutputsMergeTimeFreqSupports(_Outputs): + """Intermediate class used to get outputs from + merge_time_freq_supports operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_time_freq_supports() + >>> # Connect inputs : op.inputs. ... + >>> result_merged_support = op.outputs.merged_support() + """ + + def __init__(self, op: Operator): + super().__init__(merge_time_freq_supports._spec().outputs, op) + self._merged_support = Output( + merge_time_freq_supports._spec().output_pin(0), 0, op + ) + self._outputs.append(self._merged_support) + + @property + def merged_support(self): + """Allows to get merged_support output of the operator + + Returns + ---------- + my_merged_support : TimeFreqSupport + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_time_freq_supports() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_merged_support = op.outputs.merged_support() + """ # noqa: E501 + return self._merged_support diff --git a/ansys/dpf/core/operators/utility/python_generator.py b/ansys/dpf/core/operators/utility/python_generator.py index 85612634507..8cb6b1c518c 100644 --- a/ansys/dpf/core/operators/utility/python_generator.py +++ b/ansys/dpf/core/operators/utility/python_generator.py @@ -1,65 +1,94 @@ """ python_generator -================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "utility" category -""" class python_generator(Operator): """Generates .py file with specifications for loaded plugin(s). - available inputs: - - dll_source_path (str) - - output_path (str) + Parameters + ---------- + dll_source_path : str + output_path : str - available outputs: + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.utility.python_generator() - >>> # Instantiate operator - >>> op = dpf.operators.utility.python_generator() + >>> # Make input connections + >>> my_dll_source_path = str() + >>> op.inputs.dll_source_path.connect(my_dll_source_path) + >>> my_output_path = str() + >>> op.inputs.output_path.connect(my_output_path) - >>> # Make input connections - >>> my_dll_source_path = str() - >>> op.inputs.dll_source_path.connect(my_dll_source_path) - >>> my_output_path = str() - >>> op.inputs.output_path.connect(my_output_path) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.python_generator( + ... 
dll_source_path=my_dll_source_path, + ... output_path=my_output_path, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.utility.python_generator(dll_source_path=my_dll_source_path,output_path=my_output_path) + """ - >>> # Get output data""" - def __init__(self, dll_source_path=None, output_path=None, config=None, server=None): - super().__init__(name="python_generator", config = config, server = server) + def __init__( + self, dll_source_path=None, output_path=None, config=None, server=None + ): + super().__init__(name="python_generator", config=config, server=server) self._inputs = InputsPythonGenerator(self) self._outputs = OutputsPythonGenerator(self) - if dll_source_path !=None: + if dll_source_path is not None: self.inputs.dll_source_path.connect(dll_source_path) - if output_path !=None: + if output_path is not None: self.inputs.output_path.connect(output_path) @staticmethod def _spec(): - spec = Specification(description="""Generates .py file with specifications for loaded plugin(s).""", - map_input_pin_spec={ - 0 : PinSpecification(name = "dll_source_path", type_names=["string"], optional=False, document=""""""), - 1 : PinSpecification(name = "output_path", type_names=["string"], optional=False, document="""""")}, - map_output_pin_spec={ -}) + description = """Generates .py file with specifications for loaded plugin(s).""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="dll_source_path", + type_names=["string"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="output_path", + type_names=["string"], + optional=False, + document="""""", + ), + }, + map_output_pin_spec={}, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "python_generator") + def default_config(server=None): + """Returns the default config of the operator. 
+ + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="python_generator", server=server) @property def inputs(self): @@ -67,94 +96,89 @@ def inputs(self): Returns -------- - inputs : InputsPythonGenerator + inputs : InputsPythonGenerator """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsPythonGenerator + outputs : OutputsPythonGenerator """ return super().outputs -#internal name: python_generator -#scripting name: python_generator class InputsPythonGenerator(_Inputs): - """Intermediate class used to connect user inputs to python_generator operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.python_generator() - >>> my_dll_source_path = str() - >>> op.inputs.dll_source_path.connect(my_dll_source_path) - >>> my_output_path = str() - >>> op.inputs.output_path.connect(my_output_path) + """Intermediate class used to connect user inputs to + python_generator operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.python_generator() + >>> my_dll_source_path = str() + >>> op.inputs.dll_source_path.connect(my_dll_source_path) + >>> my_output_path = str() + >>> op.inputs.output_path.connect(my_output_path) """ + def __init__(self, op: Operator): super().__init__(python_generator._spec().inputs, op) - self._dll_source_path = Input(python_generator._spec().input_pin(0), 0, op, -1) + self._dll_source_path = Input(python_generator._spec().input_pin(0), 0, op, -1) self._inputs.append(self._dll_source_path) - self._output_path = Input(python_generator._spec().input_pin(1), 1, op, -1) + self._output_path = Input(python_generator._spec().input_pin(1), 1, op, -1) self._inputs.append(self._output_path) @property def dll_source_path(self): - """Allows to connect dll_source_path input to the operator + """Allows to connect dll_source_path input to the operator. Parameters ---------- - my_dll_source_path : str, + my_dll_source_path : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.python_generator() >>> op.inputs.dll_source_path.connect(my_dll_source_path) - >>> #or + >>> # or >>> op.inputs.dll_source_path(my_dll_source_path) - """ return self._dll_source_path @property def output_path(self): - """Allows to connect output_path input to the operator + """Allows to connect output_path input to the operator. 
Parameters ---------- - my_output_path : str, + my_output_path : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.python_generator() >>> op.inputs.output_path.connect(my_output_path) - >>> #or + >>> # or >>> op.inputs.output_path(my_output_path) - """ return self._output_path -class OutputsPythonGenerator(_Outputs): - """Intermediate class used to get outputs from python_generator operator - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.python_generator() - >>> # Connect inputs : op.inputs. ... +class OutputsPythonGenerator(_Outputs): + """Intermediate class used to get outputs from + python_generator operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.python_generator() + >>> # Connect inputs : op.inputs. ... """ + def __init__(self, op: Operator): super().__init__(python_generator._spec().outputs, op) - pass - diff --git a/ansys/dpf/core/operators/utility/remote_operator_instantiate.py b/ansys/dpf/core/operators/utility/remote_operator_instantiate.py new file mode 100644 index 00000000000..1d9f416024c --- /dev/null +++ b/ansys/dpf/core/operators/utility/remote_operator_instantiate.py @@ -0,0 +1,357 @@ +""" +remote_operator_instantiate +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class remote_operator_instantiate(Operator): + """Create a local image of an existing remote operator (identified by an + id and an address) for a given protocol registered in the streams. 
+ A workflow is created with this operator and returned in output + + Parameters + ---------- + operator_to_send : int + Local workflow to push to a remote or id of a + remote workflow + output_pin : int + Pin number of the output to name + streams_to_remote : StreamsContainer + data_sources_to_remote : DataSources, optional + output_name : str + Output's name of the workflow to return + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.remote_operator_instantiate() + + >>> # Make input connections + >>> my_operator_to_send = int() + >>> op.inputs.operator_to_send.connect(my_operator_to_send) + >>> my_output_pin = int() + >>> op.inputs.output_pin.connect(my_output_pin) + >>> my_streams_to_remote = dpf.StreamsContainer() + >>> op.inputs.streams_to_remote.connect(my_streams_to_remote) + >>> my_data_sources_to_remote = dpf.DataSources() + >>> op.inputs.data_sources_to_remote.connect(my_data_sources_to_remote) + >>> my_output_name = str() + >>> op.inputs.output_name.connect(my_output_name) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.remote_operator_instantiate( + ... operator_to_send=my_operator_to_send, + ... output_pin=my_output_pin, + ... streams_to_remote=my_streams_to_remote, + ... data_sources_to_remote=my_data_sources_to_remote, + ... output_name=my_output_name, + ... 
) + + >>> # Get output data + >>> result_remote_workflow = op.outputs.remote_workflow() + """ + + def __init__( + self, + operator_to_send=None, + output_pin=None, + streams_to_remote=None, + data_sources_to_remote=None, + output_name=None, + config=None, + server=None, + ): + super().__init__( + name="remote_operator_instantiate", config=config, server=server + ) + self._inputs = InputsRemoteOperatorInstantiate(self) + self._outputs = OutputsRemoteOperatorInstantiate(self) + if operator_to_send is not None: + self.inputs.operator_to_send.connect(operator_to_send) + if output_pin is not None: + self.inputs.output_pin.connect(output_pin) + if streams_to_remote is not None: + self.inputs.streams_to_remote.connect(streams_to_remote) + if data_sources_to_remote is not None: + self.inputs.data_sources_to_remote.connect(data_sources_to_remote) + if output_name is not None: + self.inputs.output_name.connect(output_name) + + @staticmethod + def _spec(): + description = """Create a local image of an existing remote operator (identified by an + id and an address) for a given protocol registered in the + streams. 
A workflow is created with this operator and + returned in output""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="operator_to_send", + type_names=["int32"], + optional=False, + document="""Local workflow to push to a remote or id of a + remote workflow""", + ), + 1: PinSpecification( + name="output_pin", + type_names=["int32"], + optional=False, + document="""Pin number of the output to name""", + ), + 3: PinSpecification( + name="streams_to_remote", + type_names=["streams_container"], + optional=False, + document="""""", + ), + 4: PinSpecification( + name="data_sources_to_remote", + type_names=["data_sources"], + optional=True, + document="""""", + ), + 5: PinSpecification( + name="output_name", + type_names=["string"], + optional=False, + document="""Output's name of the workflow to return""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="remote_workflow", + type_names=["workflow"], + optional=False, + document="""Remote workflow containing an image of the + remote workflow and the protocols + streams""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config( + name="remote_operator_instantiate", server=server + ) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsRemoteOperatorInstantiate + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsRemoteOperatorInstantiate + """ + return super().outputs + + +class InputsRemoteOperatorInstantiate(_Inputs): + """Intermediate class used to connect user inputs to + remote_operator_instantiate operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.remote_operator_instantiate() + >>> my_operator_to_send = int() + >>> op.inputs.operator_to_send.connect(my_operator_to_send) + >>> my_output_pin = int() + >>> op.inputs.output_pin.connect(my_output_pin) + >>> my_streams_to_remote = dpf.StreamsContainer() + >>> op.inputs.streams_to_remote.connect(my_streams_to_remote) + >>> my_data_sources_to_remote = dpf.DataSources() + >>> op.inputs.data_sources_to_remote.connect(my_data_sources_to_remote) + >>> my_output_name = str() + >>> op.inputs.output_name.connect(my_output_name) + """ + + def __init__(self, op: Operator): + super().__init__(remote_operator_instantiate._spec().inputs, op) + self._operator_to_send = Input( + remote_operator_instantiate._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._operator_to_send) + self._output_pin = Input( + remote_operator_instantiate._spec().input_pin(1), 1, op, -1 + ) + self._inputs.append(self._output_pin) + self._streams_to_remote = Input( + remote_operator_instantiate._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_to_remote) + self._data_sources_to_remote = Input( + remote_operator_instantiate._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._data_sources_to_remote) + self._output_name = Input( + 
remote_operator_instantiate._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._output_name) + + @property + def operator_to_send(self): + """Allows to connect operator_to_send input to the operator. + + Local workflow to push to a remote or id of a + remote workflow + + Parameters + ---------- + my_operator_to_send : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.remote_operator_instantiate() + >>> op.inputs.operator_to_send.connect(my_operator_to_send) + >>> # or + >>> op.inputs.operator_to_send(my_operator_to_send) + """ + return self._operator_to_send + + @property + def output_pin(self): + """Allows to connect output_pin input to the operator. + + Pin number of the output to name + + Parameters + ---------- + my_output_pin : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.remote_operator_instantiate() + >>> op.inputs.output_pin.connect(my_output_pin) + >>> # or + >>> op.inputs.output_pin(my_output_pin) + """ + return self._output_pin + + @property + def streams_to_remote(self): + """Allows to connect streams_to_remote input to the operator. + + Parameters + ---------- + my_streams_to_remote : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.remote_operator_instantiate() + >>> op.inputs.streams_to_remote.connect(my_streams_to_remote) + >>> # or + >>> op.inputs.streams_to_remote(my_streams_to_remote) + """ + return self._streams_to_remote + + @property + def data_sources_to_remote(self): + """Allows to connect data_sources_to_remote input to the operator. 
+ + Parameters + ---------- + my_data_sources_to_remote : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.remote_operator_instantiate() + >>> op.inputs.data_sources_to_remote.connect(my_data_sources_to_remote) + >>> # or + >>> op.inputs.data_sources_to_remote(my_data_sources_to_remote) + """ + return self._data_sources_to_remote + + @property + def output_name(self): + """Allows to connect output_name input to the operator. + + Output's name of the workflow to return + + Parameters + ---------- + my_output_name : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.remote_operator_instantiate() + >>> op.inputs.output_name.connect(my_output_name) + >>> # or + >>> op.inputs.output_name(my_output_name) + """ + return self._output_name + + +class OutputsRemoteOperatorInstantiate(_Outputs): + """Intermediate class used to get outputs from + remote_operator_instantiate operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.remote_operator_instantiate() + >>> # Connect inputs : op.inputs. ... + >>> result_remote_workflow = op.outputs.remote_workflow() + """ + + def __init__(self, op: Operator): + super().__init__(remote_operator_instantiate._spec().outputs, op) + self._remote_workflow = Output( + remote_operator_instantiate._spec().output_pin(0), 0, op + ) + self._outputs.append(self._remote_workflow) + + @property + def remote_workflow(self): + """Allows to get remote_workflow output of the operator + + Returns + ---------- + my_remote_workflow : Workflow + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.remote_operator_instantiate() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_remote_workflow = op.outputs.remote_workflow() + """ # noqa: E501 + return self._remote_workflow diff --git a/ansys/dpf/core/operators/utility/remote_workflow_instantiate.py b/ansys/dpf/core/operators/utility/remote_workflow_instantiate.py new file mode 100644 index 00000000000..1cb83a5b7e5 --- /dev/null +++ b/ansys/dpf/core/operators/utility/remote_workflow_instantiate.py @@ -0,0 +1,278 @@ +""" +remote_workflow_instantiate +=============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class remote_workflow_instantiate(Operator): + """Sends a local workflow to a remote process (and keep a local image of + it) or create a local image of an existing remote workflow + (identified by an id and an address) for a given protocol + registered in the streams. + + Parameters + ---------- + workflow_to_send : Workflow or int + Local workflow to push to a remote or id of a + remote workflow + streams_to_remote : StreamsContainer + data_sources_to_remote : DataSources, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.remote_workflow_instantiate() + + >>> # Make input connections + >>> my_workflow_to_send = dpf.Workflow() + >>> op.inputs.workflow_to_send.connect(my_workflow_to_send) + >>> my_streams_to_remote = dpf.StreamsContainer() + >>> op.inputs.streams_to_remote.connect(my_streams_to_remote) + >>> my_data_sources_to_remote = dpf.DataSources() + >>> op.inputs.data_sources_to_remote.connect(my_data_sources_to_remote) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.remote_workflow_instantiate( + ... workflow_to_send=my_workflow_to_send, + ... 
streams_to_remote=my_streams_to_remote, + ... data_sources_to_remote=my_data_sources_to_remote, + ... ) + + >>> # Get output data + >>> result_remote_workflow = op.outputs.remote_workflow() + """ + + def __init__( + self, + workflow_to_send=None, + streams_to_remote=None, + data_sources_to_remote=None, + config=None, + server=None, + ): + super().__init__( + name="remote_workflow_instantiate", config=config, server=server + ) + self._inputs = InputsRemoteWorkflowInstantiate(self) + self._outputs = OutputsRemoteWorkflowInstantiate(self) + if workflow_to_send is not None: + self.inputs.workflow_to_send.connect(workflow_to_send) + if streams_to_remote is not None: + self.inputs.streams_to_remote.connect(streams_to_remote) + if data_sources_to_remote is not None: + self.inputs.data_sources_to_remote.connect(data_sources_to_remote) + + @staticmethod + def _spec(): + description = """Sends a local workflow to a remote process (and keep a local image of + it) or create a local image of an existing remote workflow + (identified by an id and an address) for a given protocol + registered in the streams.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="workflow_to_send", + type_names=["workflow", "int32"], + optional=False, + document="""Local workflow to push to a remote or id of a + remote workflow""", + ), + 3: PinSpecification( + name="streams_to_remote", + type_names=["streams_container"], + optional=False, + document="""""", + ), + 4: PinSpecification( + name="data_sources_to_remote", + type_names=["data_sources"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="remote_workflow", + type_names=["workflow"], + optional=False, + document="""Remote workflow containing an image of the + remote workflow and the protocols + streams""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. 
+ + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config( + name="remote_workflow_instantiate", server=server + ) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsRemoteWorkflowInstantiate + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsRemoteWorkflowInstantiate + """ + return super().outputs + + +class InputsRemoteWorkflowInstantiate(_Inputs): + """Intermediate class used to connect user inputs to + remote_workflow_instantiate operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.remote_workflow_instantiate() + >>> my_workflow_to_send = dpf.Workflow() + >>> op.inputs.workflow_to_send.connect(my_workflow_to_send) + >>> my_streams_to_remote = dpf.StreamsContainer() + >>> op.inputs.streams_to_remote.connect(my_streams_to_remote) + >>> my_data_sources_to_remote = dpf.DataSources() + >>> op.inputs.data_sources_to_remote.connect(my_data_sources_to_remote) + """ + + def __init__(self, op: Operator): + super().__init__(remote_workflow_instantiate._spec().inputs, op) + self._workflow_to_send = Input( + remote_workflow_instantiate._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._workflow_to_send) + self._streams_to_remote = Input( + remote_workflow_instantiate._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._streams_to_remote) + self._data_sources_to_remote = Input( + remote_workflow_instantiate._spec().input_pin(4), 4, op, -1 + ) + 
self._inputs.append(self._data_sources_to_remote) + + @property + def workflow_to_send(self): + """Allows to connect workflow_to_send input to the operator. + + Local workflow to push to a remote or id of a + remote workflow + + Parameters + ---------- + my_workflow_to_send : Workflow or int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.remote_workflow_instantiate() + >>> op.inputs.workflow_to_send.connect(my_workflow_to_send) + >>> # or + >>> op.inputs.workflow_to_send(my_workflow_to_send) + """ + return self._workflow_to_send + + @property + def streams_to_remote(self): + """Allows to connect streams_to_remote input to the operator. + + Parameters + ---------- + my_streams_to_remote : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.remote_workflow_instantiate() + >>> op.inputs.streams_to_remote.connect(my_streams_to_remote) + >>> # or + >>> op.inputs.streams_to_remote(my_streams_to_remote) + """ + return self._streams_to_remote + + @property + def data_sources_to_remote(self): + """Allows to connect data_sources_to_remote input to the operator. + + Parameters + ---------- + my_data_sources_to_remote : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.remote_workflow_instantiate() + >>> op.inputs.data_sources_to_remote.connect(my_data_sources_to_remote) + >>> # or + >>> op.inputs.data_sources_to_remote(my_data_sources_to_remote) + """ + return self._data_sources_to_remote + + +class OutputsRemoteWorkflowInstantiate(_Outputs): + """Intermediate class used to get outputs from + remote_workflow_instantiate operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.remote_workflow_instantiate() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_remote_workflow = op.outputs.remote_workflow() + """ + + def __init__(self, op: Operator): + super().__init__(remote_workflow_instantiate._spec().outputs, op) + self._remote_workflow = Output( + remote_workflow_instantiate._spec().output_pin(0), 0, op + ) + self._outputs.append(self._remote_workflow) + + @property + def remote_workflow(self): + """Allows to get remote_workflow output of the operator + + Returns + ---------- + my_remote_workflow : Workflow + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.remote_workflow_instantiate() + >>> # Connect inputs : op.inputs. ... + >>> result_remote_workflow = op.outputs.remote_workflow() + """ # noqa: E501 + return self._remote_workflow diff --git a/ansys/dpf/core/operators/utility/scalars_to_field.py b/ansys/dpf/core/operators/utility/scalars_to_field.py index a31d8490e01..1782e30a15e 100644 --- a/ansys/dpf/core/operators/utility/scalars_to_field.py +++ b/ansys/dpf/core/operators/utility/scalars_to_field.py @@ -1,60 +1,107 @@ """ scalars_to_field -================ +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "utility" category -""" class scalars_to_field(Operator): - """take a double or a vector of double and transform it in a one entity field of location "numeric". 
- - available inputs: - - double_or_vector_double (float, list) - - available outputs: - - field (Field) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.utility.scalars_to_field() - - >>> # Make input connections - >>> my_double_or_vector_double = float() - >>> op.inputs.double_or_vector_double.connect(my_double_or_vector_double) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.utility.scalars_to_field(double_or_vector_double=my_double_or_vector_double) + """take a double or a vector of double and transform it in a one entity + field of location 'numeric'. + + Parameters + ---------- + double_or_vector_double : float + Double or vector of double + unit : str, optional + Unit symbole (m, hz, kg, ...) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.scalars_to_field() + + >>> # Make input connections + >>> my_double_or_vector_double = float() + >>> op.inputs.double_or_vector_double.connect(my_double_or_vector_double) + >>> my_unit = str() + >>> op.inputs.unit.connect(my_unit) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.scalars_to_field( + ... double_or_vector_double=my_double_or_vector_double, + ... unit=my_unit, + ... 
) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" - def __init__(self, double_or_vector_double=None, config=None, server=None): - super().__init__(name="fieldify", config = config, server = server) + def __init__( + self, double_or_vector_double=None, unit=None, config=None, server=None + ): + super().__init__(name="fieldify", config=config, server=server) self._inputs = InputsScalarsToField(self) self._outputs = OutputsScalarsToField(self) - if double_or_vector_double !=None: + if double_or_vector_double is not None: self.inputs.double_or_vector_double.connect(double_or_vector_double) + if unit is not None: + self.inputs.unit.connect(unit) @staticmethod def _spec(): - spec = Specification(description="""take a double or a vector of double and transform it in a one entity field of location "numeric".""", - map_input_pin_spec={ - 0 : PinSpecification(name = "double_or_vector_double", type_names=["double","vector"], optional=False, document="""double or vector of double""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """take a double or a vector of double and transform it in a one entity + field of location "numeric".""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="double_or_vector_double", + type_names=["double", "vector"], + optional=False, + document="""Double or vector of double""", + ), + 1: PinSpecification( + name="unit", + type_names=["string"], + optional=True, + document="""Unit symbole (m, hz, kg, ...)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "fieldify") + def default_config(server=None): + """Returns the default 
config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="fieldify", server=server) @property def inputs(self): @@ -62,93 +109,115 @@ def inputs(self): Returns -------- - inputs : InputsScalarsToField + inputs : InputsScalarsToField """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsScalarsToField + outputs : OutputsScalarsToField """ return super().outputs -#internal name: fieldify -#scripting name: scalars_to_field class InputsScalarsToField(_Inputs): - """Intermediate class used to connect user inputs to scalars_to_field operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.scalars_to_field() - >>> my_double_or_vector_double = float() - >>> op.inputs.double_or_vector_double.connect(my_double_or_vector_double) + """Intermediate class used to connect user inputs to + scalars_to_field operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.scalars_to_field() + >>> my_double_or_vector_double = float() + >>> op.inputs.double_or_vector_double.connect(my_double_or_vector_double) + >>> my_unit = str() + >>> op.inputs.unit.connect(my_unit) """ + def __init__(self, op: Operator): super().__init__(scalars_to_field._spec().inputs, op) - self._double_or_vector_double = Input(scalars_to_field._spec().input_pin(0), 0, op, -1) + self._double_or_vector_double = Input( + scalars_to_field._spec().input_pin(0), 0, op, -1 + ) self._inputs.append(self._double_or_vector_double) + self._unit = Input(scalars_to_field._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._unit) @property def double_or_vector_double(self): - """Allows to connect double_or_vector_double input to the operator + """Allows to connect double_or_vector_double input to the operator. - - pindoc: double or vector of double + Double or vector of double Parameters ---------- - my_double_or_vector_double : float, list, + my_double_or_vector_double : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.scalars_to_field() >>> op.inputs.double_or_vector_double.connect(my_double_or_vector_double) - >>> #or + >>> # or >>> op.inputs.double_or_vector_double(my_double_or_vector_double) - """ return self._double_or_vector_double + @property + def unit(self): + """Allows to connect unit input to the operator. + + Unit symbole (m, hz, kg, ...) 
+ + Parameters + ---------- + my_unit : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.scalars_to_field() + >>> op.inputs.unit.connect(my_unit) + >>> # or + >>> op.inputs.unit(my_unit) + """ + return self._unit + + class OutputsScalarsToField(_Outputs): - """Intermediate class used to get outputs from scalars_to_field operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.scalars_to_field() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + scalars_to_field operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.scalars_to_field() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(scalars_to_field._spec().outputs, op) - self._field = Output(scalars_to_field._spec().output_pin(0), 0, op) + self._field = Output(scalars_to_field._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.scalars_to_field() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/utility/set_property.py b/ansys/dpf/core/operators/utility/set_property.py index fc3bf89cdbb..6ad6cd16437 100644 --- a/ansys/dpf/core/operators/utility/set_property.py +++ b/ansys/dpf/core/operators/utility/set_property.py @@ -1,72 +1,123 @@ """ set_property -============ +=============== +Autogenerated DPF operator classes. 
""" +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.outputs import _modify_output_spec_with_one_type from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "utility" category -""" class set_property(Operator): """Set a property to an input field/field container - available inputs: - - field (Field, FieldsContainer) - - property_name (str) - - property_value (str, int, float) - - available outputs: - - field (Field ,FieldsContainer) - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.utility.set_property() - - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_property_name = str() - >>> op.inputs.property_name.connect(my_property_name) - >>> my_property_value = str() - >>> op.inputs.property_value.connect(my_property_value) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.utility.set_property(field=my_field,property_name=my_property_name,property_value=my_property_value) + Parameters + ---------- + field : Field or FieldsContainer + property_name : str + Property to set + property_value : str or int or float + Property to set + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.set_property() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_property_name = str() + >>> op.inputs.property_name.connect(my_property_name) + >>> my_property_value = str() + >>> op.inputs.property_value.connect(my_property_value) + + >>> # Instantiate operator and connect inputs in one 
line + >>> op = dpf.operators.utility.set_property( + ... field=my_field, + ... property_name=my_property_name, + ... property_value=my_property_value, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" - def __init__(self, field=None, property_name=None, property_value=None, config=None, server=None): - super().__init__(name="field::set_property", config = config, server = server) + def __init__( + self, + field=None, + property_name=None, + property_value=None, + config=None, + server=None, + ): + super().__init__(name="field::set_property", config=config, server=server) self._inputs = InputsSetProperty(self) self._outputs = OutputsSetProperty(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) - if property_name !=None: + if property_name is not None: self.inputs.property_name.connect(property_name) - if property_value !=None: + if property_value is not None: self.inputs.property_value.connect(property_value) @staticmethod def _spec(): - spec = Specification(description="""Set a property to an input field/field container""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document=""""""), - 1 : PinSpecification(name = "property_name", type_names=["string"], optional=False, document="""Property to set"""), - 2 : PinSpecification(name = "property_value", type_names=["string","int32","double"], optional=False, document="""Property to set""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""""")}) + description = """Set a property to an input field/field container""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + 
name="property_name", + type_names=["string"], + optional=False, + document="""Property to set""", + ), + 2: PinSpecification( + name="property_value", + type_names=["string", "int32", "double"], + optional=False, + document="""Property to set""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "field::set_property") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="field::set_property", server=server) @property def inputs(self): @@ -74,126 +125,132 @@ def inputs(self): Returns -------- - inputs : InputsSetProperty + inputs : InputsSetProperty """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsSetProperty + outputs : OutputsSetProperty """ return super().outputs -#internal name: field::set_property -#scripting name: set_property class InputsSetProperty(_Inputs): - """Intermediate class used to connect user inputs to set_property operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.set_property() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - >>> my_property_name = str() - >>> op.inputs.property_name.connect(my_property_name) - >>> my_property_value = str() - >>> op.inputs.property_value.connect(my_property_value) + """Intermediate 
class used to connect user inputs to + set_property operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.set_property() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_property_name = str() + >>> op.inputs.property_name.connect(my_property_name) + >>> my_property_value = str() + >>> op.inputs.property_value.connect(my_property_value) """ + def __init__(self, op: Operator): super().__init__(set_property._spec().inputs, op) - self._field = Input(set_property._spec().input_pin(0), 0, op, -1) + self._field = Input(set_property._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._property_name = Input(set_property._spec().input_pin(1), 1, op, -1) + self._property_name = Input(set_property._spec().input_pin(1), 1, op, -1) self._inputs.append(self._property_name) - self._property_value = Input(set_property._spec().input_pin(2), 2, op, -1) + self._property_value = Input(set_property._spec().input_pin(2), 2, op, -1) self._inputs.append(self._property_value) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.set_property() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field @property def property_name(self): - """Allows to connect property_name input to the operator + """Allows to connect property_name input to the operator. 
- - pindoc: Property to set + Property to set Parameters ---------- - my_property_name : str, + my_property_name : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.set_property() >>> op.inputs.property_name.connect(my_property_name) - >>> #or + >>> # or >>> op.inputs.property_name(my_property_name) - """ return self._property_name @property def property_value(self): - """Allows to connect property_value input to the operator + """Allows to connect property_value input to the operator. - - pindoc: Property to set + Property to set Parameters ---------- - my_property_value : str, int, float, + my_property_value : str or int or float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.set_property() >>> op.inputs.property_value.connect(my_property_value) - >>> #or + >>> # or >>> op.inputs.property_value(my_property_value) - """ return self._property_value + class OutputsSetProperty(_Outputs): - """Intermediate class used to get outputs from set_property operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.set_property() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + set_property operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.set_property() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(set_property._spec().outputs, op) - self.field_as_field = Output( _modify_output_spec_with_one_type(set_property._spec().output_pin(0), "field"), 0, op) + self.field_as_field = Output( + _modify_output_spec_with_one_type( + set_property._spec().output_pin(0), "field" + ), + 0, + op, + ) self._outputs.append(self.field_as_field) - self.field_as_fields_container = Output( _modify_output_spec_with_one_type(set_property._spec().output_pin(0), "fields_container"), 0, op) + self.field_as_fields_container = Output( + _modify_output_spec_with_one_type( + set_property._spec().output_pin(0), "fields_container" + ), + 0, + op, + ) self._outputs.append(self.field_as_fields_container) - diff --git a/ansys/dpf/core/operators/utility/strain_from_voigt.py b/ansys/dpf/core/operators/utility/strain_from_voigt.py index 6ac5bac90a0..452220eb443 100644 --- a/ansys/dpf/core/operators/utility/strain_from_voigt.py +++ b/ansys/dpf/core/operators/utility/strain_from_voigt.py @@ -1,60 +1,92 @@ """ strain_from_voigt -================= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "utility" category -""" class strain_from_voigt(Operator): """Put strain field from Voigt notation to standard format. 
- available inputs: - - field (Field, FieldsContainer) + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + - available outputs: - - field (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.utility.strain_from_voigt() - >>> # Instantiate operator - >>> op = dpf.operators.utility.strain_from_voigt() + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.strain_from_voigt( + ... field=my_field, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.utility.strain_from_voigt(field=my_field) + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, field=None, config=None, server=None): - super().__init__(name="strain_from_voigt", config = config, server = server) + super().__init__(name="strain_from_voigt", config=config, server=server) self._inputs = InputsStrainFromVoigt(self) self._outputs = OutputsStrainFromVoigt(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) @staticmethod def _spec(): - spec = Specification(description="""Put strain field from Voigt notation to standard format.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Put strain field from Voigt notation to standard 
format.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "strain_from_voigt") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="strain_from_voigt", server=server) @property def inputs(self): @@ -62,93 +94,90 @@ def inputs(self): Returns -------- - inputs : InputsStrainFromVoigt + inputs : InputsStrainFromVoigt """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsStrainFromVoigt + outputs : OutputsStrainFromVoigt """ return super().outputs -#internal name: strain_from_voigt -#scripting name: strain_from_voigt class InputsStrainFromVoigt(_Inputs): - """Intermediate class used to connect user inputs to strain_from_voigt operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.strain_from_voigt() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + """Intermediate class used to connect user inputs to + strain_from_voigt operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.strain_from_voigt() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) """ + def __init__(self, op: Operator): super().__init__(strain_from_voigt._spec().inputs, op) - self._field = Input(strain_from_voigt._spec().input_pin(0), 0, op, -1) + self._field = Input(strain_from_voigt._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.strain_from_voigt() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field + class OutputsStrainFromVoigt(_Outputs): - """Intermediate class used to get outputs from strain_from_voigt operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.strain_from_voigt() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + strain_from_voigt operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.strain_from_voigt() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(strain_from_voigt._spec().outputs, op) - self._field = Output(strain_from_voigt._spec().output_pin(0), 0, op) + self._field = Output(strain_from_voigt._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.strain_from_voigt() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/operators/utility/txt_file_to_dpf.py b/ansys/dpf/core/operators/utility/txt_file_to_dpf.py index 064fd845b92..a38aa2fc4d2 100644 --- a/ansys/dpf/core/operators/utility/txt_file_to_dpf.py +++ b/ansys/dpf/core/operators/utility/txt_file_to_dpf.py @@ -1,63 +1,97 @@ """ txt_file_to_dpf =============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "utility" category -""" class txt_file_to_dpf(Operator): """Take an input string and parse it into dataProcessing type. 
- available inputs: - - input_string (str) + Parameters + ---------- + input_string : str + Ex: 'double:1.0', 'int:1', + 'vector:1.0;1.0' + - available outputs: - - any_output1 () - - any_output2 () + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.utility.txt_file_to_dpf() - >>> # Instantiate operator - >>> op = dpf.operators.utility.txt_file_to_dpf() + >>> # Make input connections + >>> my_input_string = str() + >>> op.inputs.input_string.connect(my_input_string) - >>> # Make input connections - >>> my_input_string = str() - >>> op.inputs.input_string.connect(my_input_string) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.txt_file_to_dpf( + ... input_string=my_input_string, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.utility.txt_file_to_dpf(input_string=my_input_string) + >>> # Get output data + >>> result_any_output1 = op.outputs.any_output1() + >>> result_any_output2 = op.outputs.any_output2() + """ - >>> # Get output data - >>> result_any_output1 = op.outputs.any_output1() - >>> result_any_output2 = op.outputs.any_output2()""" def __init__(self, input_string=None, config=None, server=None): - super().__init__(name="text_parser", config = config, server = server) + super().__init__(name="text_parser", config=config, server=server) self._inputs = InputsTxtFileToDpf(self) self._outputs = OutputsTxtFileToDpf(self) - if input_string !=None: + if input_string is not None: self.inputs.input_string.connect(input_string) @staticmethod def _spec(): - spec = Specification(description="""Take an input string and parse it into dataProcessing type.""", - map_input_pin_spec={ - 0 : PinSpecification(name = "input_string", type_names=["string"], optional=False, document="""ex: 'double:1.0', 'int:1', 'vector:1.0;1.0'""")}, - map_output_pin_spec={ - 0 : 
PinSpecification(name = "any_output", type_names=[], optional=False, document="""any output"""), - 1 : PinSpecification(name = "any_output", type_names=[], optional=False, document="""any output""")}) + description = """Take an input string and parse it into dataProcessing type.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="input_string", + type_names=["string"], + optional=False, + document="""Ex: 'double:1.0', 'int:1', + 'vector:1.0;1.0'""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="any_output1", + optional=False, + document="""Any output""", + ), + 1: PinSpecification( + name="any_output2", + optional=False, + document="""Any output""", + ), + }, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "text_parser") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. 
+ """ + return Operator.default_config(name="text_parser", server=server) @property def inputs(self): @@ -65,72 +99,110 @@ def inputs(self): Returns -------- - inputs : InputsTxtFileToDpf + inputs : InputsTxtFileToDpf """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsTxtFileToDpf + outputs : OutputsTxtFileToDpf """ return super().outputs -#internal name: text_parser -#scripting name: txt_file_to_dpf class InputsTxtFileToDpf(_Inputs): - """Intermediate class used to connect user inputs to txt_file_to_dpf operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.txt_file_to_dpf() - >>> my_input_string = str() - >>> op.inputs.input_string.connect(my_input_string) + """Intermediate class used to connect user inputs to + txt_file_to_dpf operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.txt_file_to_dpf() + >>> my_input_string = str() + >>> op.inputs.input_string.connect(my_input_string) """ + def __init__(self, op: Operator): super().__init__(txt_file_to_dpf._spec().inputs, op) - self._input_string = Input(txt_file_to_dpf._spec().input_pin(0), 0, op, -1) + self._input_string = Input(txt_file_to_dpf._spec().input_pin(0), 0, op, -1) self._inputs.append(self._input_string) @property def input_string(self): - """Allows to connect input_string input to the operator + """Allows to connect input_string input to the operator. 
- - pindoc: ex: 'double:1.0', 'int:1', 'vector:1.0;1.0' + Ex: 'double:1.0', 'int:1', + 'vector:1.0;1.0' Parameters ---------- - my_input_string : str, + my_input_string : str Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.txt_file_to_dpf() >>> op.inputs.input_string.connect(my_input_string) - >>> #or + >>> # or >>> op.inputs.input_string(my_input_string) - """ return self._input_string -class OutputsTxtFileToDpf(_Outputs): - """Intermediate class used to get outputs from txt_file_to_dpf operator - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.txt_file_to_dpf() - >>> # Connect inputs : op.inputs. ... +class OutputsTxtFileToDpf(_Outputs): + """Intermediate class used to get outputs from + txt_file_to_dpf operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.txt_file_to_dpf() + >>> # Connect inputs : op.inputs. ... + >>> result_any_output1 = op.outputs.any_output1() + >>> result_any_output2 = op.outputs.any_output2() """ + def __init__(self, op: Operator): super().__init__(txt_file_to_dpf._spec().outputs, op) - pass + self._any_output1 = Output(txt_file_to_dpf._spec().output_pin(0), 0, op) + self._outputs.append(self._any_output1) + self._any_output2 = Output(txt_file_to_dpf._spec().output_pin(1), 1, op) + self._outputs.append(self._any_output2) + @property + def any_output1(self): + """Allows to get any_output1 output of the operator + + Returns + ---------- + my_any_output1 : + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.txt_file_to_dpf() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_any_output1 = op.outputs.any_output1() + """ # noqa: E501 + return self._any_output1 + + @property + def any_output2(self): + """Allows to get any_output2 output of the operator + + Returns + ---------- + my_any_output2 : + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.txt_file_to_dpf() + >>> # Connect inputs : op.inputs. ... + >>> result_any_output2 = op.outputs.any_output2() + """ # noqa: E501 + return self._any_output2 diff --git a/ansys/dpf/core/operators/utility/unitary_field.py b/ansys/dpf/core/operators/utility/unitary_field.py index c1039ed6bac..0ff27d234d8 100644 --- a/ansys/dpf/core/operators/utility/unitary_field.py +++ b/ansys/dpf/core/operators/utility/unitary_field.py @@ -1,60 +1,94 @@ """ unitary_field -============= +=============== +Autogenerated DPF operator classes. """ +from warnings import warn from ansys.dpf.core.dpf_operator import Operator from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs, _modify_output_spec_with_one_type +from ansys.dpf.core.outputs import Output, _Outputs from ansys.dpf.core.operators.specification import PinSpecification, Specification -"""Operators from Ans.Dpf.Native plugin, from "utility" category -""" class unitary_field(Operator): - """Take a field and returns an other field of scalars on the same location and scoping as the input field + """Take a field and returns an other field of scalars on the same + location and scoping as the input field + + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected - available inputs: - - field (Field, FieldsContainer) - available outputs: - - field (Field) + Examples + -------- + >>> from ansys.dpf import core as dpf - Examples - -------- - >>> from ansys.dpf import core as dpf + >>> # Instantiate operator + >>> op = dpf.operators.utility.unitary_field() - >>> # Instantiate operator - >>> op = 
dpf.operators.utility.unitary_field() + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) - >>> # Make input connections - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.unitary_field( + ... field=my_field, + ... ) - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.utility.unitary_field(field=my_field) + >>> # Get output data + >>> result_field = op.outputs.field() + """ - >>> # Get output data - >>> result_field = op.outputs.field()""" def __init__(self, field=None, config=None, server=None): - super().__init__(name="make_unit", config = config, server = server) + super().__init__(name="make_unit", config=config, server=server) self._inputs = InputsUnitaryField(self) self._outputs = OutputsUnitaryField(self) - if field !=None: + if field is not None: self.inputs.field.connect(field) @staticmethod def _spec(): - spec = Specification(description="""Take a field and returns an other field of scalars on the same location and scoping as the input field""", - map_input_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field","fields_container"], optional=False, document="""field or fields container with only one field is expected""")}, - map_output_pin_spec={ - 0 : PinSpecification(name = "field", type_names=["field"], optional=False, document="""""")}) + description = """Take a field and returns an other field of scalars on the same + location and scoping as the input field""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + 
}, + ) return spec - @staticmethod - def default_config(): - return Operator.default_config(name = "make_unit") + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the the global server. + """ + return Operator.default_config(name="make_unit", server=server) @property def inputs(self): @@ -62,93 +96,90 @@ def inputs(self): Returns -------- - inputs : InputsUnitaryField + inputs : InputsUnitaryField """ return super().inputs - @property def outputs(self): """Enables to get outputs of the operator by evaluationg it Returns -------- - outputs : OutputsUnitaryField + outputs : OutputsUnitaryField """ return super().outputs -#internal name: make_unit -#scripting name: unitary_field class InputsUnitaryField(_Inputs): - """Intermediate class used to connect user inputs to unitary_field operator - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.unitary_field() - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) + """Intermediate class used to connect user inputs to + unitary_field operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.unitary_field() + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) """ + def __init__(self, op: Operator): super().__init__(unitary_field._spec().inputs, op) - self._field = Input(unitary_field._spec().input_pin(0), 0, op, -1) + self._field = Input(unitary_field._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) @property def field(self): - """Allows to connect field input to the operator + """Allows to connect field input to the operator. - - pindoc: field or fields container with only one field is expected + Field or fields container with only one field + is expected Parameters ---------- - my_field : Field, FieldsContainer, + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.unitary_field() >>> op.inputs.field.connect(my_field) - >>> #or + >>> # or >>> op.inputs.field(my_field) - """ return self._field + class OutputsUnitaryField(_Outputs): - """Intermediate class used to get outputs from unitary_field operator - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> op = dpf.operators.utility.unitary_field() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() + """Intermediate class used to get outputs from + unitary_field operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.unitary_field() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_field = op.outputs.field() """ + def __init__(self, op: Operator): super().__init__(unitary_field._spec().outputs, op) - self._field = Output(unitary_field._spec().output_pin(0), 0, op) + self._field = Output(unitary_field._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property def field(self): """Allows to get field output of the operator - Returns ---------- - my_field : Field, + my_field : Field Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.utility.unitary_field() >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ + >>> result_field = op.outputs.field() + """ # noqa: E501 return self._field - diff --git a/ansys/dpf/core/outputs.py b/ansys/dpf/core/outputs.py index 6717347dbde..6b597aaa620 100644 --- a/ansys/dpf/core/outputs.py +++ b/ansys/dpf/core/outputs.py @@ -113,6 +113,7 @@ def _make_printable_type(type): def _modify_output_spec_with_one_type(output_spec, type): + from ansys.dpf.core.dpf_operator import PinSpecification if isinstance(output_spec, operator_pb2.PinSpecification): spec = ( operator_pb2.PinSpecification() @@ -121,8 +122,7 @@ def _modify_output_spec_with_one_type(output_spec, type): _clearRepeatedMessage(spec.type_names) spec.type_names.extend([type]) else: - spec = output_spec - spec.type_names = [type] + spec = PinSpecification._get_copy(output_spec, [type]) return spec diff --git a/ansys/dpf/core/path_utilities.py b/ansys/dpf/core/path_utilities.py new file mode 100644 index 00000000000..8b489d7bc25 --- /dev/null +++ b/ansys/dpf/core/path_utilities.py @@ -0,0 +1,86 @@ +""" +path_utilities +============== +Offer tools similar to os.path but taking the os of the +server into account to create path. +""" + +import os + +from ansys.dpf.core import server as server_module + + +def join(*args, **kwargs): + """Join two strings to form a path, following the server + architecture. 
+ Using a server version below 3.0, please ensure that the + python client and the server's os are similar before + using this method. + + Parameters + ---------- + args : str, DPFServer + Path to join and optionally a server. + + kwargs : DPFServer + server=. + + server : Server + Specific server to use. + + Returns + ------- + concatenated_file_path : str + left_path + right_path concatenated into a single string value. + + """ + server = None + parts = [] + for a in args: + if isinstance(a, str) and len(a) > 0: + parts.append(a) + elif isinstance(a, server_module.DpfServer): + server = a + if "server" in kwargs: + server = kwargs["server"] + if not server: + server = server_module._global_server() + if not server: + if server_module.RUNNING_DOCKER["use_docker"]: + current_os = "posix" + else: + return os.path.join(*args) + else: + current_os = server.os + + if len(parts) == 0: + return "" + separator = "\\" + if current_os == 'posix': + separator = "/" + path_to_return = parts[0] + for ipath in range(1, len(parts)): + path_to_return += separator + parts[ipath] + return path_to_return + +def to_server_os(path, server=None): + if not server: + server = server_module._global_server() + if not server: + return path + if server.os == 'posix': + return path.replace("\\", "/") + else: + return path.replace("/", "\\") + +def downloaded_example_path(server = None): + on_docker = server_module.RUNNING_DOCKER["use_docker"] + if not server: + server = server_module._global_server() + if server: + on_docker = server.on_docker + if on_docker: # pragma: no cover + return "/tmp/downloaded_examples" + else: + from ansys.dpf.core import LOCAL_DOWNLOADED_EXAMPLES_PATH + return LOCAL_DOWNLOADED_EXAMPLES_PATH diff --git a/ansys/dpf/core/plotter.py b/ansys/dpf/core/plotter.py index 3e2f7e1d0ae..a9b421c114c 100755 --- a/ansys/dpf/core/plotter.py +++ b/ansys/dpf/core/plotter.py @@ -1,7 +1,11 @@ -"""This module contains the DPF plotter class. 
+""" +Plotter +======= +This module contains the DPF plotter class. Contains classes used to plot a mesh and a fields container using PyVista. """ + import tempfile import os import sys @@ -393,7 +397,7 @@ def plot_contour( ) sl = shell_layers changeOp.inputs.e_shell_layer.connect(sl.value) # top layers taken - fields_container = changeOp.outputs.fields_container() + fields_container = changeOp.get_output(0, core.types.fields_container) break # Merge field data into a single array diff --git a/ansys/dpf/core/property_field.py b/ansys/dpf/core/property_field.py index ed1895d8d50..c3c460f7916 100644 --- a/ansys/dpf/core/property_field.py +++ b/ansys/dpf/core/property_field.py @@ -139,12 +139,13 @@ def as_local_field(self): >>> with field_to_local.as_local_field() as f: ... for i in range(1,num_entities+1): ... f.append(list(range(i,i+3)),i) - ... f.get_entity_data(i-1) - array([1, 2, 3]) - array([2, 3, 4]) - array([3, 4, 5]) - array([4, 5, 6]) - array([5, 6, 7]) + ... print(f.get_entity_data(i-1)) + [1 2 3] + [2 3 4] + [3 4 5] + [4 5 6] + [5 6 7] + """ return _LocalPropertyField(self) diff --git a/ansys/dpf/core/result_info.py b/ansys/dpf/core/result_info.py index d1975c04370..ca415fb8ee6 100644 --- a/ansys/dpf/core/result_info.py +++ b/ansys/dpf/core/result_info.py @@ -11,6 +11,7 @@ from ansys.dpf.core.cyclic_support import CyclicSupport from ansys.dpf.core.common import __write_enum_doc__ from ansys.dpf.core.cache import class_handling_cache +from ansys.dpf.core.check_version import server_meet_version, version_requires names = [m for m in result_info_pb2.PhysicsType.keys()] @@ -116,9 +117,19 @@ def analysis_type(self): 'static' """ + if server_meet_version("3.0", self._server): + return self._get_property("analysis_type") + intOut = self._get_list().analysis_type return result_info_pb2.AnalysisType.Name(intOut).lower() + @version_requires("3.0") + def _get_property(self, property_name): + request = result_info_pb2.GetStringPropertiesRequest() + 
request.result_info.CopyFrom(self._message) + request.property_names.extend([property_name]) + return self._stub.GetStringProperties(request).properties[property_name] + @property def physics_type(self): """Type of the physics. @@ -145,6 +156,8 @@ def _get_physics_type(self): physics_type : str Type of the physics, such as mechanical or electric. """ + if server_meet_version("3.0", self._server): + return self._get_property("physics_type") intOut = self._get_list().physics_type return result_info_pb2.PhysicsType.Name(intOut).lower() @@ -152,11 +165,16 @@ def _get_physics_type(self): @property def n_results(self): """Number of results.""" + if server_meet_version("3.0", self._server): + str_num = self._get_property("results_count") + return int(str_num) return self._get_list().nresult @property def unit_system(self): """Unit system of the result.""" + if server_meet_version("3.0", self._server): + return self._get_property("unit_system_name") val = self._get_list().unit_system return map_unit_system[val] @@ -209,44 +227,61 @@ def cyclic_support(self): @property def unit_system_name(self): """Name of the unit system.""" + if server_meet_version("3.0", self._server): + return self._get_property("unit_system_name") return self._get_list().unit_system_name @property def solver_version(self): """Version of the solver.""" - major = self._stub.List(self._message).solver_major_version - minor = self._stub.List(self._message).solver_minor_version + if server_meet_version("3.0", self._server): + return self._get_property("solver_version") + list = self._get_list() + major = list.solver_major_version + minor = list.solver_minor_version version = str(major) + "." 
+ str(minor) return version @property def solver_date(self): """Date of the solver.""" + if server_meet_version("3.0", self._server): + return int(self._get_property("solver_date")) return self._get_list().solver_date @property def solver_time(self): """Time of the solver.""" + if server_meet_version("3.0", self._server): + return int(self._get_property("solver_time")) return self._get_list().solver_time @property def user_name(self): """Name of the user.""" + if server_meet_version("3.0", self._server): + return self._get_property("user_name") return self._get_list().user_name @property def job_name(self): """Name of the job.""" + if server_meet_version("3.0", self._server): + return self._get_property("job_name") return self._get_list().job_name @property def product_name(self): """Name of the product.""" + if server_meet_version("3.0", self._server): + return self._get_property("product_name") return self._get_list().product_name @property def main_title(self): """Main title.""" + if server_meet_version("3.0", self._server): + return self._get_property("main_title") return self._get_list().main_title @property @@ -288,7 +323,7 @@ def _get_result(self, numres): def __len__(self): try: - return self._get_list().nresult + return self.n_results except: return 0 @@ -321,4 +356,4 @@ def __del__(self): def _get_list(self): return self._stub.List(self._message) - _to_cache = {_get_list: None, _get_result: None} + _to_cache = {_get_list: None, _get_result: None, _get_property: None} diff --git a/ansys/dpf/core/results.py b/ansys/dpf/core/results.py index 8f55b51c6b4..8bb75527157 100644 --- a/ansys/dpf/core/results.py +++ b/ansys/dpf/core/results.py @@ -60,13 +60,10 @@ class Results: Examples -------- - Create a stress result from the model and choose its time and mesh scopings. 
- >>> from ansys.dpf import core as dpf >>> from ansys.dpf.core import examples >>> model = dpf.Model(examples.electric_therm) >>> v = model.results.electric_potential - >>> rf = model.results.reaction_force >>> dissip = model.results.thermal_dissipation_energy Examples diff --git a/ansys/dpf/core/scoping.py b/ansys/dpf/core/scoping.py index 1f348d52017..757fe539280 100644 --- a/ansys/dpf/core/scoping.py +++ b/ansys/dpf/core/scoping.py @@ -254,7 +254,7 @@ def index(self, id: int): @property def ids(self): - """Retrive a list of IDs in the scoping. + """Retrieve a list of IDs in the scoping. Returns ------- diff --git a/ansys/dpf/core/server.py b/ansys/dpf/core/server.py index 264d1e79eed..77a91402cb2 100644 --- a/ansys/dpf/core/server.py +++ b/ansys/dpf/core/server.py @@ -21,6 +21,7 @@ from ansys.dpf.core import errors from ansys.dpf.core._version import __ansys_version__ +from ansys.dpf.core import session MAX_PORT = 65535 @@ -31,6 +32,10 @@ DPF_DEFAULT_PORT = int(os.environ.get("DPF_PORT", 50054)) LOCALHOST = os.environ.get("DPF_IP", "127.0.0.1") +RUNNING_DOCKER = {"use_docker": "DPF_DOCKER" in os.environ.keys()} +if RUNNING_DOCKER["use_docker"]: + RUNNING_DOCKER["docker_name"] = os.environ.get("DPF_DOCKER") +RUNNING_DOCKER['args'] = "" def shutdown_global_server(): try: @@ -63,15 +68,17 @@ def _global_server(): ``True``, start the server locally. If ``False``, connect to the existing server. 
""" - if dpf.core.SERVER is None: - if os.environ.get("DPF_START_SERVER", "").lower() == "false": - ip = os.environ.get("DPF_IP", LOCALHOST) - port = int(os.environ.get("DPF_PORT", DPF_DEFAULT_PORT)) - connect_to_server(ip, port) - else: - start_local_server() + if hasattr(dpf, "core") and hasattr(dpf.core, "SERVER"): + if dpf.core.SERVER is None: + if os.environ.get("DPF_START_SERVER", "").lower() == "false": + ip = os.environ.get("DPF_IP", LOCALHOST) + port = int(os.environ.get("DPF_PORT", DPF_DEFAULT_PORT)) + connect_to_server(ip, port) + else: + start_local_server() - return dpf.core.SERVER + return dpf.core.SERVER + return None def port_in_use(port, host=LOCALHOST): @@ -126,6 +133,8 @@ def start_local_server( ansys_path=None, as_global=True, load_operators=True, + use_docker_by_default=True, + docker_name=None ): """Start a new local DPF server at a given port and IP address. @@ -150,33 +159,43 @@ def start_local_server( use this IP and port. The default is ``True``. load_operators : bool, optional Whether to automatically load the math operators. The default is ``True``. + use_docker_by_default : bool, optional + If the environment variable DPF_DOCKER is set to a docker name and use_docker_by_default + is True, the server is ran as a docker (default is True). + docker_name : str, optional + To start DPF server as a docker, specify the docker name here. Returns ------- server : server.DpfServer """ - if ansys_path is None: - ansys_path = os.environ.get("AWP_ROOT" + __ansys_version__, find_ansys()) - if ansys_path is None: - raise ValueError( - "Unable to automatically locate the Ansys path. 
" - "Manually enter one when starting the server or set it " - 'as the environment variable "ANSYS_PATH"' - ) + use_docker = use_docker_by_default and (docker_name or RUNNING_DOCKER["use_docker"]) + if not use_docker: + if ansys_path is None: + ansys_path = os.environ.get("AWP_ROOT" + __ansys_version__, find_ansys()) + if ansys_path is None: + raise ValueError( + "Unable to automatically locate the Ansys path " + f"for version {__ansys_version__}." + "Manually enter one when starting the server or set it " + 'as the environment variable "ANSYS_PATH"' + ) - # verify path exists - if not os.path.isdir(ansys_path): - raise NotADirectoryError(f'Invalid Ansys path "{ansys_path}"') + # verify path exists + if not os.path.isdir(ansys_path): + raise NotADirectoryError(f'Invalid Ansys path "{ansys_path}"') - # parse the version to an int and check for supported - try: - ver = int(ansys_path[-3:]) - if ver < 211: - raise errors.InvalidANSYSVersionError(f"Ansys v{ver} does not support DPF") - if ver == 211 and is_ubuntu(): - raise OSError("DPF on v211 does not support Ubuntu") - except ValueError: - pass + # parse the version to an int and check for supported + try: + ver = int(ansys_path[-3:]) + if ver < 211: + raise errors.InvalidANSYSVersionError(f"Ansys v{ver} does not support DPF") + if ver == 211 and is_ubuntu(): + raise OSError("DPF on v211 does not support Ubuntu") + except ValueError: + pass + elif RUNNING_DOCKER["use_docker"]: + docker_name = RUNNING_DOCKER["docker_name"] # avoid using any ports in use from existing servers used_ports = [] @@ -192,12 +211,16 @@ def start_local_server( while port_in_use(port): port += 1 + if use_docker: + port = _find_port_available_for_docker_bind(port) + server = None n_attempts = 10 for _ in range(n_attempts): try: server = DpfServer( - ansys_path, ip, port, as_global=as_global, load_operators=load_operators + ansys_path, ip, port, as_global=as_global, + load_operators=load_operators, docker_name=docker_name ) break except 
errors.InvalidPortError: # allow socket in use errors @@ -218,7 +241,7 @@ def connect_to_server(ip=LOCALHOST, port=DPF_DEFAULT_PORT, as_global=True, timeo """Connect to an existing DPF server. This method sets the global default channel that is then used for the - duration of the DPF sesssion. + duration of the DPF session. Parameters ---------- @@ -233,7 +256,7 @@ def connect_to_server(ip=LOCALHOST, port=DPF_DEFAULT_PORT, as_global=True, timeo module. All DPF objects created in this Python session will use this IP and port. The default is ``True``. timeout : float, optional - Maximum number of seconds for the initalization attempt. + Maximum number of seconds for the initialization attempt. The default is ``10``. Once the specified number of seconds passes, the connection fails. @@ -275,7 +298,7 @@ class DpfServer: Port to connect to the remote instance on. The default is ``"DPF_DEFAULT_PORT"``, which is 50054. timeout : float, optional - Maximum number of seconds for the initalization attempt. + Maximum number of seconds for the initialization attempt. The default is ``10``. Once the specified number of seconds passes, the connection fails. as_global : bool, optional @@ -287,6 +310,8 @@ class DpfServer: is ``True``. launch_server : bool, optional Whether to launch the server on Windows. + docker_name : str, optional + To start DPF server as a docker, specify the docker name here. 
""" def __init__( @@ -298,6 +323,7 @@ def __init__( as_global=True, load_operators=True, launch_server=True, + docker_name=None, ): """Start the DPF server.""" # check valid ip and port @@ -308,7 +334,7 @@ def __init__( if os.name == "posix" and "ubuntu" in platform.platform().lower(): raise OSError("DPF does not support Ubuntu") elif launch_server: - launch_dpf(ansys_path, ip, port) + self._server_id = launch_dpf(ansys_path, ip, port, docker_name=docker_name) self.channel = grpc.insecure_channel("%s:%d" % (ip, port)) @@ -317,7 +343,7 @@ def __init__( # verify connection has matured tstart = time.time() while ((time.time() - tstart) < timeout) and not state._matured: - time.sleep(0.01) + time.sleep(0.001) if not state._matured: raise TimeoutError( @@ -339,6 +365,7 @@ def __init__( self._input_port = port self._own_process = launch_server self._base_service_instance = None + self._session_instance = None @property def _base_service(self): @@ -348,6 +375,12 @@ def _base_service(self): self._base_service_instance = BaseService(self, timeout=1) return self._base_service_instance + @property + def _session(self): + if not self._session_instance: + self._session_instance = session.Session(self) + return self._session_instance + @property def info(self): """Server information. 
@@ -397,15 +430,34 @@ def version(self): """ return self._base_service.server_info["server_version"] + @property + def os(self): + """Get the operating system of the server + + Returns + ------- + os : str + "nt" or "posix" + """ + return self._base_service.server_info["os"] + def __str__(self): return f"DPF Server: {self.info}" def shutdown(self): if self._own_process and self.live and self._base_service: self._base_service._prepare_shutdown() - p = psutil.Process(self._base_service.server_info["server_process_id"]) - p.kill() - time.sleep(0.1) + if hasattr(self, "_server_id") and self._server_id: + run_cmd = f"docker stop {self._server_id}" + process = subprocess.Popen(run_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + run_cmd = f"docker rm {self._server_id}" + for line in io.TextIOWrapper(process.stdout, encoding="utf-8"): + pass + process = subprocess.Popen(run_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + else: + p = psutil.Process(self._base_service.server_info["server_process_id"]) + p.kill() + time.sleep(0.01) self.live = False try: if id(dpf.core.SERVER) == id(self): @@ -436,6 +488,10 @@ def __del__(self): except: pass + @property + def on_docker(self): + return hasattr(self, "_server_id") and self._server_id is not None + def check_version(self, required_version, msg=None): """Check if the server version matches with a required version. 
@@ -460,8 +516,67 @@ def check_version(self, required_version, msg=None): return server_meet_version_and_raise(required_version, self, msg) +def _find_port_available_for_docker_bind(port): + run_cmd = "docker ps --all" + process = subprocess.Popen(run_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + used_ports = [] + for line in io.TextIOWrapper(process.stdout, encoding="utf-8"): + if not "CONTAINER ID" in line: + split = line.split("0.0.0.0:") + if len(split) > 1: + used_ports.append(int(split[1].split("-")[0])) + while port in used_ports: + port += 1 + return port + +def _run_launch_server_process(ansys_path, ip, port, docker_name): + if docker_name: + docker_server_port = int(os.environ.get("DOCKER_SERVER_PORT", port)) + dpf_run_dir = os.getcwd() + from ansys.dpf.core import LOCAL_DOWNLOADED_EXAMPLES_PATH + if os.name == "nt": + run_cmd = f"docker run -d -p {port}:{docker_server_port} " \ + f"{RUNNING_DOCKER['args']} " \ + f'-v "{LOCAL_DOWNLOADED_EXAMPLES_PATH}:/tmp/downloaded_examples" ' \ + f"-e DOCKER_SERVER_PORT={docker_server_port} " \ + f"--expose={docker_server_port} " \ + f"{docker_name}" + else: + run_cmd = ["docker run", + "-d", + f"-p"+f"{port}:{docker_server_port}", + RUNNING_DOCKER['args'], + f'-v "{LOCAL_DOWNLOADED_EXAMPLES_PATH}:/tmp/downloaded_examples"' + f"-e DOCKER_SERVER_PORT={docker_server_port}", + f"--expose={docker_server_port}", + docker_name] + else: + if os.name == "nt": + run_cmd = f"Ans.Dpf.Grpc.bat --address {ip} --port {port}" + path_in_install = "aisol/bin/winx64" + else: + run_cmd = ["./Ans.Dpf.Grpc.sh", f"--address {ip}", f"--port {port}"] + path_in_install = "aisol/bin/linx64" + + # verify ansys path is valid + if os.path.isdir(f"{ansys_path}/{path_in_install}"): + dpf_run_dir = f"{ansys_path}/{path_in_install}" + else: + dpf_run_dir = f"{ansys_path}" + if not os.path.isdir(dpf_run_dir): + raise NotADirectoryError( + f'Invalid ansys path at "{ansys_path}". 
' + "Unable to locate the directory containing DPF at " + f'"{dpf_run_dir}"' + ) + + old_dir = os.getcwd() + os.chdir(dpf_run_dir) + process = subprocess.Popen(run_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + os.chdir(old_dir) + return process -def launch_dpf(ansys_path, ip=LOCALHOST, port=DPF_DEFAULT_PORT, timeout=10): +def launch_dpf(ansys_path, ip=LOCALHOST, port=DPF_DEFAULT_PORT, timeout=10, docker_name=None): """Launch Ansys DPF. Parameters @@ -476,46 +591,35 @@ def launch_dpf(ansys_path, ip=LOCALHOST, port=DPF_DEFAULT_PORT, timeout=10): Port to connect to the remote instance on. The default is ``"DPF_DEFAULT_PORT"``, which is 50054. timeout : float, optional - Maximum number of seconds for the initalization attempt. + Maximum number of seconds for the initialization attempt. The default is ``10``. Once the specified number of seconds passes, the connection fails. + docker_name : str, optional + To start DPF server as a docker, specify the docker name here. Returns ------- process : subprocess.Popen DPF Process. """ - if os.name == "nt": - run_cmd = f"Ans.Dpf.Grpc.bat --address {ip} --port {port}" - path_in_install = "aisol/bin/winx64" - else: - run_cmd = ["./Ans.Dpf.Grpc.sh", f"--address {ip}", f"--port {port}"] - path_in_install = "aisol/bin/linx64" - - # verify ansys path is valid - if os.path.isdir(f"{ansys_path}/{path_in_install}"): - dpf_run_dir = f"{ansys_path}/{path_in_install}" - else: - dpf_run_dir = f"{ansys_path}" - if not os.path.isdir(dpf_run_dir): - raise NotADirectoryError( - f'Invalid ansys path at "{ansys_path}". 
' - "Unable to locate the directory containing DPF at " - f'"{dpf_run_dir}"' - ) - - old_dir = os.getcwd() - os.chdir(dpf_run_dir) - process = subprocess.Popen(run_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - os.chdir(old_dir) + process = _run_launch_server_process(ansys_path, ip, port, docker_name) # check to see if the service started lines = [] + docker_id = [] def read_stdout(): for line in io.TextIOWrapper(process.stdout, encoding="utf-8"): LOG.debug(line) lines.append(line) + if docker_name: + docker_id.append(lines[0].replace("\n", "")) + docker_process = subprocess.Popen(f"docker logs {docker_id[0]}", + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + for line in io.TextIOWrapper(docker_process.stdout, encoding="utf-8"): + LOG.debug(line) + lines.append(line) errors = [] @@ -537,7 +641,7 @@ def read_stderr(): raise TimeoutError(f"Server did not start in {timeout} seconds") # verify there were no errors - time.sleep(1) + time.sleep(0.1) if errors: try: process.kill() @@ -547,3 +651,6 @@ def read_stderr(): if "Only one usage of each socket address" in errstr: raise errors.InvalidPortError(f"Port {port} in use") raise RuntimeError(errstr) + + if len(docker_id) > 0: + return docker_id[0] diff --git a/ansys/dpf/core/session.py b/ansys/dpf/core/session.py new file mode 100644 index 00000000000..4305d47f5e9 --- /dev/null +++ b/ansys/dpf/core/session.py @@ -0,0 +1,135 @@ +""" +Session +======== +""" + +import logging +import weakref + +from ansys import dpf +from ansys.dpf.core.check_version import version_requires, server_meet_version +from ansys.dpf.core.common import _common_percentage_progress_bar +from ansys.dpf.core.errors import protect_grpc + +LOG = logging.getLogger(__name__) +LOG.setLevel('DEBUG') + + +class Session: + """A class used to a user session on the server, it allows to plan events + call backs from the server when workflows are running. + A session is started every time a ``'DpfServer'`` is created. 
+ """ + + def __init__(self, server=None): + if server is None: + server = dpf.core._global_server() + + self._server_weak_ref = weakref.ref(server) + if server_meet_version("3.0", self._server): + self._stub = self._connect() + self.__send_init_request() + self.add_progress_system() + + @property + def _server(self): + return self._server_weak_ref() + + @version_requires("3.0") + def _connect(self): + """Connect to the grpc service""" + from ansys.grpc.dpf import session_pb2_grpc + return session_pb2_grpc.SessionServiceStub(self._server.channel) + + @protect_grpc + def __send_init_request(self): + from ansys.grpc.dpf import session_pb2 + request = session_pb2.CreateSessionRequest() + self._message = self._stub.Create(request) + + @version_requires("3.0") + def add_workflow(self, workflow, identifier): + """Add a workflow to the session. It allows to follow the workflow's + events while it's running. + This method is automatically called when a workflow's output + is requested. + + Parameters + ---------- + workflow : Workflow + + identifier : str + name given to the workflow + """ + from ansys.grpc.dpf import session_pb2 + request = session_pb2.AddRequest() + request.session.CopyFrom(self._message) + request.wf.CopyFrom(workflow._message) + request.identifier = identifier + self._stub.Add(request) + + @version_requires("3.0") + def add_operator(self, operator, pin, identifier): + """Add a workflow made of the input operator and all his ancestors + to the session. It allows to follow the workflow's + events while it's running. + This method is automatically called when an operator's output + is requested and the opetion op.progress_bar is set to ``'True'``. 
+ + Parameters + ---------- + operator : Operator + + pin : int + output pin number requested + + identifier : str + name given to the workflow + """ + from ansys.grpc.dpf import session_pb2 + request = session_pb2.AddRequest() + request.session.CopyFrom(self._message) + request.op_output.op.CopyFrom(operator._message) + request.op_output.pin = pin + request.identifier = identifier + self._stub.Add(request) + + @version_requires("3.0") + def listen_to_progress(self): + """Starts a progress bar and update it every time an operator is + finished. + """ + service = self._stub.ListenToProgress(self._message) + bar = _common_percentage_progress_bar("Workflow running") + bar.start() + for chunk in service: + try: + bar.update(chunk.progress.progress_percentage) + if len(chunk.state.state): + LOG.warning(chunk.state.state) + except Exception as e: + raise e + pass + try: + bar.finish() + except: + pass + + @version_requires("3.0") + def add_progress_system(self): + """Asks the session to start recording progress events. + Called when the session is started. + """ + self._stub.AddProgressEventSystem(self._message) + + @version_requires("3.0") + def flush_workflows(self): + """This removes the handle on the workflow by the session""" + self._stub.FlushWorkflows(self._message) + + def __del__(self): + try: + if server_meet_version("3.0", self._server): + self._stub.Delete(self._message) + except: + pass diff --git a/ansys/dpf/core/time_freq_scoping_factory.py b/ansys/dpf/core/time_freq_scoping_factory.py index 4894236e707..f73d0a81543 100644 --- a/ansys/dpf/core/time_freq_scoping_factory.py +++ b/ansys/dpf/core/time_freq_scoping_factory.py @@ -13,7 +13,7 @@ def scoping_by_load_step(load_step, server=None): """Create a specific ``ansys.dpf.core.Scoping`` for a given load step. - The returned scoping describes a specific time frequencey support element + The returned scoping describes a specific time frequency support element for a given load step. 
Parameters @@ -87,7 +87,7 @@ def scoping_by_set(cumulative_set, server=None): def scoping_by_sets(cumulative_sets, server=None): """Create a specific :class:`ansys.dpf.core.Scoping` for a given list of cumulative set indices. - The returned scoping describes a specific time frequencey support element for a given + The returned scoping describes a specific time frequency support element for a given list of cumulative sets of indices. Parameters diff --git a/ansys/dpf/core/time_freq_support.py b/ansys/dpf/core/time_freq_support.py index 1710b9812d3..daf3c65fbdd 100644 --- a/ansys/dpf/core/time_freq_support.py +++ b/ansys/dpf/core/time_freq_support.py @@ -19,7 +19,7 @@ class TimeFreqSupport: This class stores values such as the frequencies (time/complex), RPMs, and harmonic indices. The RPM value is a step (or load step)-based value. - The time freqencies, complex frequencies, and harmonic indices are set-based values. + The time frequencies, complex frequencies, and harmonic indices are set-based values. There is one set value for each step/substep combination. Parameters @@ -53,7 +53,10 @@ def __init__(self, time_freq_support=None, server=None): self._message = time_freq_support elif isinstance(time_freq_support, support_pb2.Support): self._message = time_freq_support_pb2.TimeFreqSupport() - self._message.id = time_freq_support.id + if isinstance(self._message.id, int): + self._message.id = time_freq_support.id + else: + self._message.id.id = time_freq_support.id.id else: request = base_pb2.Empty() self._message = self._stub.Create(request) @@ -300,7 +303,7 @@ def get_cumulative_index(self, step=0, substep=0, freq=None, cplx=False): freq : double, optional Frequency in Hz. cplx : False, optional - Whehter to return a complex frequency. The default is ``False``. + Whether to return a complex frequency. The default is ``False``. 
Returns ------- @@ -337,7 +340,12 @@ def _sets_count(self): request.entity = base_pb2.NUM_SETS return self._stub.Count(request).count - @protect_grpc + def __check_if_field_id(self, field): + if isinstance(field.id, int): + return field.id != 0 + else: + return field.id.id != 0 + def _get_frequencies(self, cplx=False): """Retrieves a field of all the frequencies in the model (complex or real). @@ -352,17 +360,13 @@ def _get_frequencies(self, cplx=False): field : dpf.core.Field Field of all the frequencies in the model (complex or real). """ - request = time_freq_support_pb2.ListRequest() - request.time_freq_support.CopyFrom(self._message) - list_response = self._stub.List(request) - if cplx is True and list_response.freq_complex.id != 0: - return dpf.core.Field(server=self._server, field=list_response.freq_complex) - elif cplx is False and list_response.freq_real.id != 0: - return dpf.core.Field(server=self._server, field=list_response.freq_real) - return None + attributes_list = self._get_attributes_list() + if cplx and "freq_complex" in attributes_list: + return attributes_list["freq_complex"] + elif cplx != True and "freq_real" in attributes_list: + return attributes_list["freq_real"] - @protect_grpc def _get_rpms(self): """Retrieves a field of all the RPMs in the model. @@ -371,15 +375,10 @@ def _get_rpms(self): field : dpf.core.Field Field of all the RPMs in the model (complex or real). """ - request = time_freq_support_pb2.ListRequest() - request.time_freq_support.CopyFrom(self._message) - - list_response = self._stub.List(request) - if list_response.rpm.id != 0: - return dpf.core.Field(server=self._server, field=list_response.rpm) - return None + attributes_list = self._get_attributes_list() + if "rpm" in attributes_list: + return attributes_list["rpm"] - @protect_grpc def _get_harmonic_indices(self, stage_num=0): """Retrieves a field of all the harmonic indices in the model. 
@@ -391,28 +390,47 @@ def _get_harmonic_indices(self, stage_num=0): stage_num: int, optional, default = 0 Targeted stage number. """ + attributes_list = self._get_attributes_list(stage_num) + if "cyc_harmonic_index" in attributes_list: + return attributes_list["cyc_harmonic_index"] + + @protect_grpc + def _get_attributes_list(self, stage_num=None): request = time_freq_support_pb2.ListRequest() request.time_freq_support.CopyFrom(self._message) - request.cyclic_stage_num = stage_num - + if stage_num: + request.cyclic_stage_num = stage_num list_response = self._stub.List(request) - if list_response.cyc_harmonic_index.id != 0: - return dpf.core.Field( - server=self._server, field=list_response.cyc_harmonic_index - ) - return None + out = {} + if list_response.HasField("freq_real"): + out["freq_real"] = dpf.core.Field( + server=self._server, field=list_response.freq_real) + if list_response.HasField("freq_complex"): + out["freq_complex"] = dpf.core.Field( + server=self._server, field=list_response.freq_complex) + if list_response.HasField("rpm"): + out["rpm"] = dpf.core.Field( + server=self._server, field=list_response.rpm) + if list_response.HasField("cyc_harmonic_index"): + out["cyc_harmonic_index"] = dpf.core.Field( + server=self._server, field=list_response.cyc_harmonic_index) + if hasattr(list_response, "cyclic_harmonic_index_scoping") and\ + list_response.HasField("cyclic_harmonic_index_scoping"): + out["cyclic_harmonic_index_scoping"] = dpf.core.Scoping( + server=self._server, scoping=list_response.cyclic_harmonic_index_scoping) + return out def append_step( - self, - step_id, - step_time_frequencies, - step_complex_frequencies=None, - rpm_value=None, - step_harmonic_indices=None, + self, + step_id, + step_time_frequencies, + step_complex_frequencies=None, + rpm_value=None, + step_harmonic_indices=None, ): """Append a step with all its field values in the time frequencies support. The RPM value is a step (or load step)-based value. 
- The values for time freqencies, complex frequencies, and harmonic indices are set-based. + The values for time frequencies, complex frequencies, and harmonic indices are set-based. There is one set value for each step/substep combination. It is necessary that each call of my_time_freq_support.append_step(kwargs**) contains @@ -502,7 +520,7 @@ def append_step( def deep_copy(self, server=None): """Create a deep copy of the data for a time frequency support on a given server. - This methos is useful for passing data from one server instance to another. + This method is useful for passing data from one server instance to another. Parameters ---------- diff --git a/ansys/dpf/core/workflow.py b/ansys/dpf/core/workflow.py index efe56f18adf..7f071821b1f 100644 --- a/ansys/dpf/core/workflow.py +++ b/ansys/dpf/core/workflow.py @@ -8,6 +8,7 @@ from ansys import dpf from ansys.dpf.core import dpf_operator, inputs, outputs from ansys.dpf.core.errors import protect_grpc +from ansys.dpf.core.check_version import server_meet_version, version_requires from ansys.grpc.dpf import base_pb2, workflow_pb2, workflow_pb2_grpc LOG = logging.getLogger(__name__) @@ -26,7 +27,7 @@ class Workflow: Server with the channel connected to the remote or local instance. The default is ``None``, in which case an attempt is made to use the global server. 
- workflow : workflow_pb2.Workflow + workflow : workflow_message_pb2.Workflow Examples -------- @@ -61,8 +62,12 @@ def __init__(self, workflow=None, server=None): self._message = workflow - if workflow is None: - self.__send_init_request() + remote_copy_needed = server_meet_version("3.0", self._server) \ + and isinstance(workflow, workflow_pb2.RemoteCopyRequest) + if isinstance(workflow, str): + self.__create_from_stream(workflow) + elif workflow is None or remote_copy_needed: + self.__send_init_request(workflow) @protect_grpc def connect(self, pin_name, inpt, pin_out=0): @@ -106,12 +111,13 @@ def connect(self, pin_name, inpt, pin_out=0): request = workflow_pb2.UpdateConnectionRequest() request.wf.CopyFrom(self._message) request.pin_name = pin_name - dpf_operator._fillConnectionRequestMessage(request, inpt, pin_out) + tmp = dpf_operator._fillConnectionRequestMessage(request, inpt, self._server, pin_out) self._stub.UpdateConnection(request) @protect_grpc def get_output(self, pin_name, output_type): """Retrieve the output of the operator on the pin number. + A progress bar following the workflow state is printed. 
Parameters ---------- @@ -128,9 +134,19 @@ def get_output(self, pin_name, output_type): if output_type is not None: dpf_operator._write_output_type_to_proto_style(output_type, request) - out = self._stub.Get(request) + if server_meet_version("3.0", self._server): + # handle progress bar + self._server._session.add_workflow(self, "workflow") + out_future = self._stub.Get.future(request) + while out_future.is_active(): + self._server._session.listen_to_progress() + out = out_future.result() + else: + out = self._stub.Get(request) return dpf_operator._convertOutputMessageToPythonInstance( - out, output_type, self._server + out, + output_type, + self._server ) else: raise ValueError( @@ -381,20 +397,21 @@ def output_names(self): """ return self.info["output_names"] - def chain_with(self, workflow, input_output_names=None): - """Chain two workflows together so that they become one workflow. + @version_requires("3.0") + def connect_with(self, left_workflow, output_input_names=None): + """Chain 2 workflows together so that they become one workflow. The one workflow contains all the operators, inputs, and outputs exposed in both workflows. Parameters ---------- - workflow : core.Workflow - Second workflow's inputs to chained with this workflow's outputs. - input_output_names : str tuple, optional - Input name of the workflow to chain with the output name of the second workflow. - The default is ``None``, in which case this outputs in this workflow with the same - names as the inputs in the second workflow are chained. + left_workflow : core.Workflow + Second workflow's outputs to chained with this workflow's inputs. + output_input_names : str tuple, str dict optional + Input name of the left_workflow to be cained with the output name of this workflow. + The default is ``None``, in which case the inputs in the left_workflow with the same + names as the outputs of this workflow are chained. 
Examples -------- @@ -404,10 +421,10 @@ def chain_with(self, workflow, input_output_names=None): | INPUT: | | | |input_output_names = ("output","field" ) | - | ____ ______________________ | - | "data_sources" -> |this| -> "stuff" "field" -> |workflow_to_chain_with| -> "contour" | - |"time_scoping" -> | | "mesh_scoping" -> | | | - | |____| -> "output" |______________________| | + | _____________ ____________ | + | "data_sources" -> |left_workflow| -> "stuff" "field" -> | this | -> "contour" | + |"time_scoping" -> | | "mesh_scoping" -> | | | + | |_____________| -> "output" |____________| | | OUTPUT | | ____ | |"data_sources" -> |this| -> "stuff" | @@ -417,13 +434,96 @@ def chain_with(self, workflow, input_output_names=None): """ - request = workflow_pb2.ChainRequest() - request.wf.CopyFrom(self._message) - request.wf_to_chain_with.CopyFrom(workflow._message) - if input_output_names: - request.input_to_output.output_name = input_output_names[0] - request.input_to_output.input_name = input_output_names[1] - self._stub.Chain(request) + request = workflow_pb2.ConnectRequest() + request.right_wf.CopyFrom(self._message) + request.left_wf.CopyFrom(left_workflow._message) + if output_input_names: + if isinstance(output_input_names, tuple): + request.input_to_output.append( + workflow_pb2.InputToOutputChainRequest( + output_name=output_input_names[0], + input_name=output_input_names[1])) + elif isinstance(output_input_names, dict): + for key in output_input_names: + request.input_to_output.append( + workflow_pb2.InputToOutputChainRequest( + output_name=key, + input_name=output_input_names[key])) + else: + raise TypeError("output_input_names argument is expect" + "to be either a str tuple or a str dict") + + self._stub.Connect(request) + + @version_requires("3.0") + def create_on_other_server(self, *args, **kwargs): + """Create a new instance of a workflow on another server. The new + Workflow has the same operators, exposed inputs and output pins as + this workflow. 
Connections between operators and between data and + operators are kept (except for exposed pins). + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the global server. + + ip : str, optional + ip address on which the new instance should be created (always put + a port in args as well) + + port : str, int , optional + + address: str, optional + address on which the new instance should be created ("ip:port") + + Returns + ------- + Workflow + + Examples + -------- + Create a generic Workflow computing the minimum of displacement by chaining the ``'U'`` + and ``'min_max_fc'`` operators. + + >>> from ansys.dpf import core as dpf + >>> disp_op = dpf.operators.result.displacement() + >>> max_fc_op = dpf.operators.min_max.min_max_fc(disp_op) + >>> workflow = dpf.Workflow() + >>> workflow.add_operators([disp_op,max_fc_op]) + >>> workflow.set_input_name("data_sources", disp_op.inputs.data_sources) + >>> workflow.set_output_name("min", max_fc_op.outputs.field_min) + >>> workflow.set_output_name("max", max_fc_op.outputs.field_max) + >>> #other_server = dpf.start_local_server(as_global=False) + >>> #new_workflow = workflow.create_on_other_server(server=other_server) + >>> #assert 'data_sources' in new_workflow.input_names + + """ + server = None + address = None + for arg in args: + if isinstance(arg, dpf.core.server.DpfServer): + server = arg + elif isinstance(arg, str): + address = arg + + if "ip" in kwargs: + address = kwargs["ip"] + ":" + str(kwargs["port"]) + if "address" in kwargs: + address = kwargs["address"] + if "server" in kwargs: + server = kwargs["server"] + if server: + text_stream = self._stub.WriteToStream(self._message) + return Workflow(workflow=text_stream.stream, server=server) + elif address: + request = workflow_pb2.RemoteCopyRequest() + request.wf.CopyFrom(self._message) + request.address = address + return Workflow(workflow=request, 
server=self._server) + else: + raise ValueError("a connection address (either with address input" + "or both ip and port inputs) or a server is required") def _connect(self): """Connect to the gRPC service.""" @@ -447,6 +547,18 @@ def __str__(self): return _description(self._message, self._server) @protect_grpc - def __send_init_request(self): - request = base_pb2.Empty() + def __send_init_request(self, workflow): + if server_meet_version("3.0", self._server) \ + and isinstance(workflow, workflow_pb2.RemoteCopyRequest): + request = workflow_pb2.CreateRequest() + request.remote_copy.CopyFrom(workflow) + else: + request = base_pb2.Empty() + if hasattr(workflow_pb2, "CreateRequest"): + request = workflow_pb2.CreateRequest(empty=request) self._message = self._stub.Create(request) + + @protect_grpc + def __create_from_stream(self, string): + request = workflow_pb2.TextStream(stream=string) + self._message = self._stub.LoadFromStream(request) diff --git a/conftest.py b/conftest.py index 4e03af4c1f7..e6e6852a512 100644 --- a/conftest.py +++ b/conftest.py @@ -3,7 +3,6 @@ Launch or connect to a persistent local DPF service to be shared in pytest as a session fixture """ -import pytest from ansys.dpf import core from ansys.dpf.core.misc import module_exists @@ -16,14 +15,4 @@ # enable off_screen plotting to avoid test interruption - -@pytest.fixture(scope="session", autouse=True) -def cleanup(request): - """Cleanup a testing directory once we are finished.""" - - def close_servers(): - core.server.shutdown_all_session_servers() - - request.addfinalizer(close_servers) - core.settings.disable_off_screen_rendering() diff --git a/docs/Makefile b/docs/Makefile index 297b3eef147..9b5689e8387 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -20,4 +20,3 @@ help: clean: rm -rf $(BUILDDIR)/* - rm -rf source/examples/ diff --git a/docs/source/_static/dpf.html b/docs/source/_static/dpf.html index b871e06b195..e3893af86ef 100644 --- a/docs/source/_static/dpf.html +++ 
b/docs/source/_static/dpf.html @@ -12,10 +12,38 @@ } body { - font-family: Arial, Helvetica, sans-serif; - + font-family: Arial, Helvetica, sans-serif; + padding: 0; + margin: 0; + border: 0; + overflow: hidden; } +.topbottom { + position: absolute; + left: 0; + top: 0; + padding: 0; + margin: 0; + border: 0; + width: 100%; + height: 100%; + display: flex; + flex-direction: column; + overflow: hidden; +} +.leftright { + flex: 1; + width: 100%; + height: 10%; + padding: 0; + margin: 0; + border: 0; + display: flex; + flex-direction: row; +} + + * { box-sizing: border-box; } @@ -38,26 +66,22 @@ /* The sidebar menu */ .sidenav { - margin-top: 112px; - margin-left: 0px; - height: 100%; /* Full-height: remove this if you want "auto" height */ width: 300px; /* Set the width of the sidebar */ - position: fixed; /* Fixed Sidebar (stay in place on scroll) */ z-index: 1; /* Stay on top */ - left: 0; background-color: var(--main-medium-light-color); - overflow-x: hidden; /* Disable horizontal scroll */ padding-top: 20px; + overflow-y: auto; } .sidenav h2 { padding-left : 20px;} /* The navigation menu links */ .sidenav a { - padding: 6px 8px 6px 16px; - text-decoration: none; - font-size: 16px; - display: block; + padding: 6px 8px 6px 16px; + text-decoration: none; + font-size: 16px; + display: block; + caret-color: transparent; } /* When you mouse over the navigation links, change their color */ @@ -67,12 +91,9 @@ /* Style page content */ .main { - margin-left: 300px; - margin-right: -8px; - padding-top: 10px; background-color: var(--main-light-grey-color); - padding-bottom: 30px; - padding-top: 150px; + flex: 1; + overflow-y: auto; } @@ -161,18 +182,16 @@ /* Add a black background color to the top navigation */ .topnav { - background-color: var(--main-top-bar-color); /*balck*/ - position: fixed; - margin-left: 0px; + background-color: var(--main-top-bar-color); /*black*/ + position: relative; + top: 0; + left: 0; width: 100%; height: 120px; - /*overflow: hidden;*/ padding: 
0px; - margin-left: -8px; - margin-right: -8px; - margin-top: -8px; cursor: pointer; z-index: 100; + caret-color: transparent; } /* Style of dpf logo*/ .dpf-logo @@ -437,13 +456,13 @@ margin: 0; cursor: text; /* same core font for outer and inner elements */ - font: normal normal normal 13px monaco, courier, monospace; + font: normal normal normal 15px monaco, courier, monospace; line-height:20px; } pre { padding: 0px 10px 0px 10px; /* overall font-size */ - font: normal normal normal 13px monaco, courier, monospace; + font: normal normal normal 15px monaco, courier, monospace; } dl @@ -639,34 +658,42 @@ - - -
-

Available Operators

- -
- -
+

Overview of Data Processing Framework

The Data Processing Framework (DPF) is designed to provide numerical simulation users/engineers with a toolbox for accessing and transforming simulation data. DPF can access data from solver result files as well as several neutral formats (csv, hdf5, vtk, etc.). Various operators are available allowing the manipulation and the transformation of this data. DPF is a workflow-based framework which allows simple and/or complex evaluations by chaining operators. The data in DPF is defined based on physics-agnostic mathematical quantities described in a self-sufficient entity called field. This allows DPF to be a modular and easy-to-use tool with a large range of capabilities. It's a product designed to handle large amounts of data.

Advantages

Computation efficiency
DPF is a modern framework and it has been developed by taking advantage of new hardware architectures. Thanks to continued development, new capabilities are frequently added.
@@ -675,24 +702,30 @@

Available Operators

Extensibility and Customization
DPF is developed around very few entities, one for the data (field) and one for the operation (operator). Each DPF capability is developed through operators, which allows for a very good componentization of the framework. DPF is also plugin based; this way, adding new features or handling new formats is fast and easy. With this componentization, those plugins, and the usage of DPF scripting, the user can add his own capabilities and link his existing work with DPF.
-

How to use the CPython package

Install the environment

-

DPF's CPython interface is a grpc service. Its server is available in Workbench installation under aisol/bin/{platform}/Ans.Dpf.Grpc.exe. The client is based on a python SDK and protobuf generated python scripts. To use this API, the environment must have:

-
    -
  • grpc io-tools
  • -
  • jupyter-lab
  • -
-

To install those module with anaconda:

-

Before launching the jupyter notebook application, several environment variables need to be set:

-
    -
  • %ANSYS_PATH% to the install folder: ANSYSInc/v{version}.
  • -
  • %DPF_PATH% to %ANSYS_PATH%/aisol/bin/{platform}/Ans.Dpf.Grpc.exe.
  • -
  • %PYTHONPATH%: add the paths to DPF's SDK and protobuf generated python scripts.
  • -
+

How to use the CPython package

Install and consume open source environment

+

DPF's CPython interface is based on a gRPC service. Its server is available in the Workbench installation under aisol/bin/{platform}/Ans.Dpf.Grpc.bat (Windows) or Ans.Dpf.Grpc.sh (Linux). The client is based on a Python SDK and protobuf-generated Python scripts.

+

Two modules of DPF Python services are currently available: DPF-Core and DPF-Post. They are both available in open source. DPF-Core is also available for internal development.

+ +

Open source modules

+

Once you've installed Ansys 2021R1 or newer, you can install DPF with:

+

This will install the latest version of ``ansys-dpf-core`` and ``ansys-dpf-post`` and all the necessary dependencies.

-

Connect to the server

-

Once Jupyter Notebook or Lab is launched with the requirements above, connecting the Python client to the service is done with:

-

How to use the IPython package

ACT Console

+

Editable open source install (Development Mode)

+

If you wish to edit and potentially contribute to the DPF-Core or DPF-Post python +module, clone the repository and install it using pip with the ``-e`` development flag.

+ +

Connect to a specific server

+

+ User may want to set a server with a specific id and port in order to connect to it from a different machine. Here is the code to proceed: +

+

How to use the IPython package

ACT Console

Open the ACT console scripting in Mechanical

The ACT console can be opened by clicking on "Automation"/"Scripting" menus in Mechanical.

Import DPF and connect it to the current console

@@ -706,7 +739,15 @@

DPF's helpers to access mechanical's data

  • a mesh selection in the interface: to collect a mesh Scoping (DPF’s entity representing a list of ids of nodes or elements), once a geometry selection is picked out in the interface, it can be accessed via:
  • -

    How to transform the data

    +

    Python Result

    +

    The Python Result object allows to evaluate a workflow and to visualize its outputs in Mechanical (contours/graph/mesh...).

    +

    Enable the Python Result

    +

To enable its use, the feature flags “Beta Options” (tab Tools/Appearance) and “Create Python Results” (tab Tools/Mechanical) must be checked in the Workbench interface. From Mechanical, a Python Result object can be inserted under the Solution object using the contextual menu or the Model ribbon menu.

    +

    Evaluate a Python Result

    +

Once the Python Result object is added (see previous part) and written (a template script to calculate the Total Deformation result at the last step is provided by default) and the analysis is solved, an evaluation is needed. Any changes to the script’s contents will make the state of the Python Result object go “Undefined”. Users must then “Connect” the callback back to the event before evaluating by right-clicking on the Python Result object and selecting "Connect". The user can finally click on "Solve" or "Evaluate Result".

    +

    Write a Python Result

    +

The Workflow is the global entity that will be used. Built by chaining operators, it will evaluate the data defined by the used operators. It needs input information, and it will compute the requested output information. This "output information" is then used by Mechanical to plot contours/tables/charts and meshes on the application. To choose the information that will be used by Mechanical, the user must expose results out of his workflow. To do so, the workflow's API gives the ability to "SetOutputContour(my_operator)" for contour (the colors) plotted on the geometry, to "SetOutputWarpField(my_operator)" to deform the contours (with 3D displacements) and to "SetOutputMesh(my_operator)" to plot the results on a different mesh than the current mesh in Mechanical. If "SetOutputContour(my_operator)" publishes several results over time in a fields container, a chart and a table will plot the min and max over time. Here is an example using the 3 output types; other examples can be found under "APIs"/"Workflow Examples":

    +

    How to transform the data

    Operator

    The Operator is the only object used to create and transform the data. It can be seen as an integrated circuit in electronics with a range of pins in input and in output. When the operator is evaluated, it will process the input information to compute its output with respect to its description. The operator is made of:

      @@ -733,7 +774,7 @@

      Workflow

The workflow is built by chaining operators. It will evaluate the data processing defined by the used operators. It needs input information, and it will compute the requested output information. The workflow is used to create a black box computing more or less basic transformation of the data. The different operators contained by a workflow can be internally connected together so that the end user doesn't need to be aware of its complexity. The workflow only needs to expose the necessary input pins and output pins. For example, a workflow could expose a "time scoping" input pin and a "data sources" input pin and expose a "result" output pin and have very complex routines inside it. See workflows' examples in the APIs tab.

      -
    +
    Meshed Region

    Time Freq Support

The time freq support describes an analysis' temporal or frequency space. For a transient analysis, all the time sets' cumulative indices with their times are described. For a harmonic analysis, the real and imaginary frequencies, the RPMs, and the load steps are described.

    + +

    Model

    +

The model is a helper designed to give shortcuts to the user to access a model's metadata and to instantiate result providers for this model. A Model is able to open a DataSources or a Streams to read the metadata and expose it to the user. The metadata is made of all the entities describing a model: its MeshedRegion, its TimeFreqSupport and its ResultInfo. With the model, the user can easily access information about the mesh, about the time/freq steps and substeps used in the analysis, and the list of available results.

    +

    Using DPF's entities in scripting

    Scoping

    Create a Scoping

    The Scoping is a set of entity ids defined on a location (the location is optional).

    @@ -1479,7 +1524,7 @@

    Create a Meshed Region

The user can create his own data to manipulate it with DPF. The Meshed Region can be created simply with:

    Get Meshed Region's data from DataSources

    -

    A model is usually represented by a Meshed Region in DPF. The mesh provider operator allows to access an analysis' mesh. The user can then get different informations in the mesh like the coordinates of all the nodes and the connectivity between elements and nodes.

    +

    A model is usually represented by a Meshed Region in DPF. The mesh provider operator allows to access an analysis' mesh. The user can then get different information in the mesh like the coordinates of all the nodes and the connectivity between elements and nodes.

    Time Freq Support

    @@ -1489,7 +1534,13 @@

    Create Time Freq Support

    Get Time Freq Support's data from DataSources

    Time Freq Support of a specific file can be accessed using the following methods.

    -

    Using DPF's operators in scripting

    Operator types

    + + +

    Model

    +

    Explore a Model

    +

The Model is built with DataSources that it will open (in a stream by default) to explore an analysis. Printing the model is a good tool to see the results that are available.

    + +

    Using DPF's operators in scripting

    Operator types

    In DPF, the operator is used to import and modify the simulation data. We can count 3 main types of operators:

    • Operators importing/reading data

    • @@ -1517,7 +1568,7 @@

      Chaining operators together

      2 syntaxes can be used to create and connect operators together:

      Configurating operators

      -

      Advanced user might want to configurate an operator's behavior during its running phase. This can be done through the "config". This option allows to choose if an operator can directly modify the input data container instead of creating a new one with the "inplace" configuration, to choose if an operation between to fields should use their indeces or mesh ids with the "work_by_index" configuration... Each operator's description explains which configuration are supported.

    Advanced user might want to configurate an operator's behavior during its running phase. This can be done through the "config". This option allows to choose if an operator can directly modify the input data container instead of creating a new one with the "inplace" configuration, to choose if an operation between to fields should use their indeces or mesh ids with the "work_by_index" configuration... Each operator's description explains which configuration are supported.
    Configurating operators 1,2.00E+00,2.00E+00,2.00E+00,2.00E+00,2.00E+00,2.00E+00 2,2.00E+00,2.00E+00,2.00E+00,2.00E+00,2.00E+00,2.00E+00 3,2.00E+00,2.00E+00,2.00E+00,2.00E+00,2.00E+00,2.00E+00 -end_of_set,,,,,,">Configurating operators 0 0 0 0 0 0 0 0 0 -">

    Example of workflows and their scripts

    math: amplitude (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    metadata: mesh support provider

    Inputs

    Outputs

    Configurations

    Scripting

    averaging: nodal fraction (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    result: cyclic expansion

    Inputs

    Outputs

    Configurations

    Scripting

    geo: mass

    Inputs

    Outputs

    Configurations

    Scripting

    math: unit convert (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    math: -

    Inputs

    Outputs

    Configurations

    Scripting

    result: plastic strain principal 1

    Inputs

    Outputs

    Configurations

    Scripting

    math: multiply (complex fields)

    Inputs

    Outputs

    Configurations

    Scripting

    math: unit convert

    Inputs

    Outputs

    Configurations

    Scripting

    math: accumulate min over label

    Inputs

    Outputs

    Configurations

    Scripting

    math: +

    Inputs

    Outputs

    Configurations

    Scripting

    min_max: min max over time

    Inputs

    Outputs

    Configurations

    Scripting

    math: + (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    min_max: phase of max

    Inputs

    Outputs

    Configurations

    Scripting

    math: sin (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    math: + constant (field)

    Inputs

    Outputs

    Configurations

    Scripting

    math: + constant (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    math: total sum

    Inputs

    Outputs

    Configurations

    Scripting

    math: - (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    math: ^ (field)

    Inputs

    Outputs

    Configurations

    Scripting

    scoping: intersect scopings

    Inputs

    Outputs

    Configurations

    Scripting

    scoping: elements in mesh

    Inputs

    Outputs

    Configurations

    Scripting

    math: scale (field)

    Inputs

    Outputs

    Configurations

    Scripting

    math: ^ (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    math: scale (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    math: sweeping phase

    Inputs

    Outputs

    Configurations

    Scripting

    math: centroid

    Inputs

    Outputs

    Configurations

    Scripting

    math: sweeping phase (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    math: centroid (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    math: ^2 (field)

    Inputs

    Outputs

    Configurations

    Scripting

    averaging: elemental fraction (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    math: sin (field)

    Inputs

    Outputs

    Configurations

    Scripting

    math: cos (field)

    Inputs

    Outputs

    Configurations

    Scripting

    result: rigid transformation

    Inputs

    Outputs

    Configurations

    Scripting

    utility: convert to fields container

    Inputs

    Outputs

    Configurations

    Scripting

    math: cos (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    math: linear combination

    Inputs

    Outputs

    Configurations

    Scripting

    math: ^2 (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    math: sqrt (field)

    Inputs

    Outputs

    Configurations

    Scripting

    math: norm (field)

    Inputs

    Outputs

    Configurations

    Scripting

    min_max: time of max

    Inputs

    Outputs

    Configurations

    Scripting

    math: sqrt (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    math: norm (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    math: / (component-wise field)

    Inputs

    Outputs

    Configurations

    Scripting

    math: / (component-wise fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    math: kronecker product

    Inputs

    Outputs

    Configurations

    Scripting

    utility: html doc

    Inputs

    Outputs

    Configurations

    Scripting

    math: real part

    Inputs

    Outputs

    Configurations

    Scripting

    math: conjugate

    Inputs

    Outputs

    Configurations

    Scripting

    result: nodal averaged elastic strains

    Inputs

    Outputs

    Configurations

    Scripting

    math: imaginary part

    Inputs

    Outputs

    Configurations

    Scripting

    math: modulus (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    math: + (complex fields)

    Inputs

    Outputs

    Configurations

    Scripting

    math: dot (complex fields)

    Inputs

    Outputs

    Configurations

    Scripting

    math: / (complex fields)

    Inputs

    Outputs

    Configurations

    Scripting

    utility: unitary field

    Inputs

    Outputs

    Configurations

    Scripting

    math: dot (field)

    Inputs

    Outputs

    Configurations

    Scripting

    result: elastic strain Y

    Inputs

    Outputs

    Configurations

    Scripting

    math: derivate (complex fields)

    Inputs

    Outputs

    Configurations

    Scripting

    math: polar to complex fields

    Inputs

    Outputs

    Configurations

    Scripting

    math: dot (by scalar field)

    Inputs

    Outputs

    Configurations

    Scripting

    math: dot (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    math: phase (field)

    Inputs

    Outputs

    Configurations

    Scripting

    result: nodal moment

    Inputs

    Outputs

    Configurations

    Scripting

    math: dot (by scalar field) (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    result: cyclic analytic disp max

    Inputs

    Outputs

    Configurations

    Scripting

    math: phase (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    math: modulus (field)

    Inputs

    Outputs

    Configurations

    Scripting

    result: elemental mass

    Inputs

    Outputs

    Configurations

    Scripting

    math: total sum (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    result: heat flux

    Inputs

    Outputs

    Configurations

    Scripting

    result: co-energy

    Inputs

    Outputs

    Configurations

    Scripting

    math: dot

    Inputs

    Outputs

    Configurations

    Scripting

    result: nodal averaged equivalent thermal strains

    Inputs

    Outputs

    Configurations

    Scripting

    math: overall dot

    Inputs

    Outputs

    Configurations

    Scripting

    min_max: min max by entity

    Inputs

    Outputs

    Configurations

    Scripting

    result: nmisc

    Inputs

    Outputs

    Configurations

    Scripting

    min_max: min max by entity over time

    Inputs

    Outputs

    Configurations

    Scripting

    min_max: max over time

    Inputs

    Outputs

    Configurations

    Scripting

    scoping: connectivity ids

    Inputs

    Outputs

    Configurations

    Scripting

    min_max: min over time

    Inputs

    Outputs

    Configurations

    Scripting

    geo: element nodal contribution

    Inputs

    Outputs

    Configurations

    Scripting

    min_max: time of min

    Inputs

    Outputs

    Configurations

    Scripting

    min_max: max over phase

    Inputs

    Outputs

    Configurations

    Scripting

    math: dot (tensors)

    Inputs

    Outputs

    Configurations

    Scripting

    math: invert

    Inputs

    Outputs

    Configurations

    Scripting

    math: invert (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    result: plastic strain principal 3

    Inputs

    Outputs

    Configurations

    Scripting

    logic: same meshes?

    Inputs

    Outputs

    Configurations

    Scripting

    mesh: external layer

    Inputs

    Outputs

    Configurations

    Scripting

    logic: component selector (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    logic: component selector (field)

    Inputs

    Outputs

    Configurations

    Scripting

    scoping: on property

    Inputs

    Outputs

    Configurations

    Scripting

    utility: extract field

    Inputs

    Outputs

    Configurations

    Scripting

    mesh: node coordinates

    Inputs

    Outputs

    Configurations

    Scripting

    mesh: stl export

    Inputs

    Outputs

    Configurations

    Scripting

    utility: bind support

    Inputs

    Outputs

    Configurations

    Scripting

    utility: convert to field

    Inputs

    Outputs

    Configurations

    Scripting

    utility: change location

    Inputs

    Outputs

    Configurations

    Scripting

    utility: voigt to standard strains

    Inputs

    Outputs

    Configurations

    Scripting

    utility: set property

    Inputs

    Outputs

    Configurations

    Scripting

    utility: forward field

    Inputs

    Outputs

    Configurations

    Scripting

    mesh: points from coordinates

    Inputs

    Outputs

    Configurations

    Scripting

    utility: forward fields container

    Inputs

    Outputs

    Configurations

    Scripting

    utility: forward meshes container

    Inputs

    Outputs

    Configurations

    Scripting

    result: plastic strain principal 2

    Inputs

    Outputs

    Configurations

    Scripting

    geo: integrate over elements

    Inputs

    Outputs

    Configurations

    Scripting

    geo: center of gravity

    Inputs

    Outputs

    Configurations

    Scripting

    utility: forward

    Inputs

    Outputs

    Configurations

    Scripting

    utility: txt file to dpf

    Inputs

    Outputs

    Configurations

    Scripting

    utility: bind support (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    mesh: extract from field

    Inputs

    Outputs

    Configurations

    Scripting

    result: pres to field

    Inputs

    Outputs

    Configurations

    Scripting

    averaging: extend to mid nodes (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    averaging: elemental nodal to nodal elemental (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    utility: python generator

    Inputs

    Outputs

    Configurations

    Scripting

    result: cyclic expanded acceleration

    Inputs

    Outputs

    Configurations

    Scripting

    result: elastic strain Z

    Inputs

    Outputs

    Configurations

    Scripting

    metadata: result info provider

    Inputs

    Outputs

    Configurations

    Scripting

    result: stress

    Inputs

    Outputs

    Configurations

    Scripting

    result: stress X

    Inputs

    Outputs

    Configurations

    Scripting

    result: stress Y

    Inputs

    Outputs

    Configurations

    Scripting

    result: stress Z

    Inputs

    Outputs

    Configurations

    Scripting

    result: stress XY

    Inputs

    Outputs

    Configurations

    Scripting

    result: stress YZ

    Inputs

    Outputs

    Configurations

    Scripting

    result: stress XZ

    Inputs

    Outputs

    Configurations

    Scripting

    result: stress principal 1

    Inputs

    Outputs

    Configurations

    Scripting

    result: stress principal 2

    Inputs

    Outputs

    Configurations

    Scripting

    result: stress principal 3

    Inputs

    Outputs

    Configurations

    Scripting

    result: nodal solution to global cs

    Inputs

    Outputs

    Configurations

    Scripting

    result: elastic strain

    Inputs

    Outputs

    Configurations

    Scripting

    result: elastic strain X

    Inputs

    Outputs

    Configurations

    Scripting

    result: elastic strain XY

    Inputs

    Outputs

    Configurations

    Scripting

    result: elastic strain YZ

    Inputs

    Outputs

    Configurations

    Scripting

    result: elastic strain XZ

    Inputs

    Outputs

    Configurations

    Scripting

    invariant: eigen values (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    result: elastic strain principal 1

    Inputs

    Outputs

    Configurations

    Scripting

    geo: scoping normals

    Inputs

    Outputs

    Configurations

    Scripting

    result: elastic strain principal 2

    Inputs

    Outputs

    Configurations

    Scripting

    result: elastic strain principal 3

    Inputs

    Outputs

    Configurations

    Scripting

    averaging: to elemental (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    result: plastic strain

    Inputs

    Outputs

    Configurations

    Scripting

    scoping: transpose

    Inputs

    Outputs

    Configurations

    Scripting

    result: plastic strain X

    Inputs

    Outputs

    Configurations

    Scripting

    result: plastic strain Y

    Inputs

    Outputs

    Configurations

    Scripting

    filter: band pass (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    geo: to polar coordinates

    Inputs

    Outputs

    Configurations

    Scripting

    result: plastic strain Z

    Inputs

    Outputs

    Configurations

    Scripting

    serialization: vtk export

    Inputs

    Outputs

    Configurations

    Scripting

    result: plastic strain XY

    Inputs

    Outputs

    Configurations

    Scripting

    result: hydrostatic pressure

    Inputs

    Outputs

    Configurations

    Scripting

    result: plastic strain YZ

    Inputs

    Outputs

    Configurations

    Scripting

    filter: low pass (only scoping)

    Inputs

    Outputs

    Configurations

    Scripting

    result: plastic strain XZ

    Inputs

    Outputs

    Configurations

    Scripting

    result: acceleration

    Inputs

    Outputs

    Configurations

    Scripting

    result: acceleration X

    Inputs

    Outputs

    Configurations

    Scripting

    result: poynting vector

    Inputs

    Outputs

    Configurations

    Scripting

    result: acceleration Y

    Inputs

    Outputs

    Configurations

    Scripting

    result: acceleration Z

    Inputs

    Outputs

    Configurations

    Scripting

    result: element centroids

    Inputs

    Outputs

    Configurations

    Scripting

    scoping: rescope (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    result: velocity

    Inputs

    Outputs

    Configurations

    Scripting

    result: reaction force

    Inputs

    Outputs

    Configurations

    Scripting

    serialization: serializer

    Inputs

    Outputs

    Configurations

    Scripting

    result: velocity X

    Inputs

    Outputs

    Configurations

    Scripting

    result: velocity Y

    Inputs

    Outputs

    Configurations

    Scripting

    result: velocity Z

    Inputs

    Outputs

    Configurations

    Scripting

    result: displacement

    Inputs

    Outputs

    Configurations

    Scripting

    result: displacement X

    Inputs

    Outputs

    Configurations

    Scripting

    result: displacement Y

    Inputs

    Outputs

    Configurations

    Scripting

    result: displacement Z

    Inputs

    Outputs

    Configurations

    Scripting

    result: heat flux X

    Inputs

    Outputs

    Configurations

    Scripting

    result: heat flux Y

    Inputs

    Outputs

    Configurations

    Scripting

    result: electric field

    Inputs

    Outputs

    Configurations

    Scripting

    result: heat flux Z

    Inputs

    Outputs

    Configurations

    Scripting

    result: element nodal forces

    Inputs

    Outputs

    Configurations

    Scripting

    result: structural temperature

    Inputs

    Outputs

    Configurations

    Scripting

    result: thermal strain

    Inputs

    Outputs

    Configurations

    Scripting

    result: incremental energy

    Inputs

    Outputs

    Configurations

    Scripting

    serialization: mechanical csv to field

    Inputs

    Outputs

    Configurations

    Scripting

    result: stiffness matrix energy

    Inputs

    Outputs

    Configurations

    Scripting

    result: equivalent stress parameter

    Inputs

    Outputs

    Configurations

    Scripting

    mesh: skin (tri mesh)

    Inputs

    Outputs

    Configurations

    Scripting

    result: stress ratio

    Inputs

    Outputs

    Configurations

    Scripting

    result: accu eqv plastic strain

    Inputs

    Outputs

    Configurations

    Scripting

    result: plastic state variable

    Inputs

    Outputs

    Configurations

    Scripting

    math: average over label

    Inputs

    Outputs

    Configurations

    Scripting

    result: accu eqv creep strain

    Inputs

    Outputs

    Configurations

    Scripting

    mapping: scoping on coordinates

    Inputs

    Outputs

    Configurations

    Scripting

    result: plastic strain energy density

    Inputs

    Outputs

    Configurations

    Scripting

    result: cyclic expanded el strain

    Inputs

    Outputs

    Configurations

    Scripting

    result: creep strain energy density

    Inputs

    Outputs

    Configurations

    Scripting

    result: material property of element

    Inputs

    Outputs

    Configurations

    Scripting

    result: elastic strain energy density

    Inputs

    Outputs

    Configurations

    Scripting

    result: contact status

    Inputs

    Outputs

    Configurations

    Scripting

    serialization: field to csv

    Inputs

    Outputs

    Configurations

    Scripting

    result: contact penetration

    Inputs

    Outputs

    Configurations

    Scripting

    result: contact pressure

    Inputs

    Outputs

    Configurations

    Scripting

    geo: moment of inertia

    Inputs

    Outputs

    Configurations

    Scripting

    result: contact friction stress

    Inputs

    Outputs

    Configurations

    Scripting

    result: contact total stress

    Inputs

    Outputs

    Configurations

    Scripting

    result: cyclic expanded element nodal forces

    Inputs

    Outputs

    Configurations

    Scripting

    result: contact sliding distance

    Inputs

    Outputs

    Configurations

    Scripting

    serialization: vtk to fields

    Inputs

    Outputs

    Configurations

    Scripting

    result: contact gap distance

    Inputs

    Outputs

    Configurations

    Scripting

    result: contact surface heat flux

    Inputs

    Outputs

    Configurations

    Scripting

    result: num surface status changes

    Inputs

    Outputs

    Configurations

    Scripting

    result: contact fluid penetration pressure

    Inputs

    Outputs

    Configurations

    Scripting

    result: elemental volume

    Inputs

    Outputs

    Configurations

    Scripting

    result: artificial hourglass energy

    Inputs

    Outputs

    Configurations

    Scripting

    result: kinetic energy

    Inputs

    Outputs

    Configurations

    Scripting

    result: thermal dissipation energy

    Inputs

    Outputs

    Configurations

    Scripting

    result: nodal force

    Inputs

    Outputs

    Configurations

    Scripting

    result: temperature

    Inputs

    Outputs

    Configurations

    Scripting

    result: nodal averaged equivalent plastic strain

    Inputs

    Outputs

    Configurations

    Scripting

    result: raw displacement

    Inputs

    Outputs

    Configurations

    Scripting

    result: raw reaction force

    Inputs

    Outputs

    Configurations

    Scripting

    result: electric potential

    Inputs

    Outputs

    Configurations

    Scripting

    result: thickness

    Inputs

    Outputs

    Configurations

    Scripting

    result: custom result

    Inputs

    Outputs

    Configurations

    Scripting

    result: stress von mises

    Inputs

    Outputs

    Configurations

    Scripting

    metadata: time freq provider

    Inputs

    Outputs

    Configurations

    Scripting

    metadata: material provider

    Inputs

    Outputs

    Configurations

    Scripting

    metadata: streams provider

    Inputs

    Outputs

    Configurations

    Scripting

    mesh: mesh provider

    Inputs

    Outputs

    Configurations

    Scripting

    metadata: mesh selection manager provider

    Inputs

    Outputs

    Configurations

    Scripting

    result: nodal averaged thermal strains

    Inputs

    Outputs

    Configurations

    Scripting

    metadata: boundary condition provider

    Inputs

    Outputs

    Configurations

    Scripting

    metadata: cyclic analysis?

    Inputs

    Outputs

    Configurations

    Scripting

    metadata: material support provider

    Inputs

    Outputs

    Configurations

    Scripting

    serialization: deserializer

    Inputs

    Outputs

    Configurations

    Scripting

    result: cyclic expanded velocity

    Inputs

    Outputs

    Configurations

    Scripting

    logic: same property fields?

    Inputs

    Outputs

    Configurations

    Scripting

    min_max: over field

    Inputs

    Outputs

    Configurations

    Scripting

    min_max: over fields container

    Inputs

    Outputs

    Configurations

    Scripting

    min_max: over label

    Inputs

    Outputs

    Configurations

    Scripting

    min_max: max by component

    Inputs

    Outputs

    Configurations

    Scripting

    min_max: min by component

    Inputs

    Outputs

    Configurations

    Scripting

    logic: merge fields by label

    Inputs

    Outputs

    Configurations

    Scripting

    min_max: incremental over fields container

    Inputs

    Outputs

    Configurations

    Scripting

    scoping: splitted on property type

    Inputs

    Outputs

    Configurations

    Scripting

    min_max: incremental over field

    Inputs

    Outputs

    Configurations

    Scripting

    math: accumulate over label

    Inputs

    Outputs

    Configurations

    Scripting

    result: equivalent radiated power

    Inputs

    Outputs

    Configurations

    Scripting

    math: accumulate level over label

    Inputs

    Outputs

    Configurations

    Scripting

    scoping: rescope

    Inputs

    Outputs

    Configurations

    Scripting

    scoping: on named selection

    Inputs

    Outputs

    Configurations

    Scripting

    metadata: cyclic support provider

    Inputs

    Outputs

    Configurations

    Scripting

    scoping: nodes in mesh

    Inputs

    Outputs

    Configurations

    Scripting

    scoping: adapt with scopings container

    Inputs

    Outputs

    Configurations

    Scripting

    averaging: elemental nodal to nodal elemental (field)

    Inputs

    Outputs

    Configurations

    Scripting

    utility: change shell layers

    Inputs

    Outputs

    Configurations

    Scripting

    logic: merge solid and shell fields

    Inputs

    Outputs

    Configurations

    Scripting

    logic: same fields?

    Inputs

    Outputs

    Configurations

    Scripting

    logic: fields included?

    Inputs

    Outputs

    Configurations

    Scripting

    logic: same fields container?

    Inputs

    Outputs

    Configurations

    Scripting

    filter: high pass (field)

    Inputs

    Outputs

    Configurations

    Scripting

    filter: high pass (only scoping)

    Inputs

    Outputs

    Configurations

    Scripting

    filter: high pass (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    filter: low pass (field)

    Inputs

    Outputs

    Configurations

    Scripting

    filter: low pass (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    filter: band pass (field)

    Inputs

    Outputs

    Configurations

    Scripting

    geo: rotate cylindrical coordinates

    Inputs

    Outputs

    Configurations

    Scripting

    filter: band pass (only scoping)

    Inputs

    Outputs

    Configurations

    Scripting

    result: mapdl run

    Inputs

    Outputs

    Configurations

    Scripting

    serialization: csv to field

    Inputs

    Outputs

    Configurations

    Scripting

    geo: rotate

    Inputs

    Outputs

    Configurations

    Scripting

    logic: enrich materials

    Inputs

    Outputs

    Configurations

    Scripting

    averaging: elemental nodal to nodal (field)

    Inputs

    Outputs

    Configurations

    Scripting

    averaging: elemental nodal to nodal (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    averaging: elemental to nodal (field)

    Inputs

    Outputs

    Configurations

    Scripting

    averaging: elemental to nodal (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    averaging: nodal difference (field)

    Inputs

    Outputs

    Configurations

    Scripting

    invariant: eigen vectors

    Inputs

    Outputs

    Configurations

    Scripting

    averaging: nodal difference (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    averaging: elemental difference (field)

    Inputs

    Outputs

    Configurations

    Scripting

    averaging: elemental difference (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    averaging: to nodal (field)

    Inputs

    Outputs

    Configurations

    Scripting

    geo: rotate in cylindrical coordinates (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    invariant: eigen values (field)

    Inputs

    Outputs

    Configurations

    Scripting

    averaging: to nodal (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    averaging: extend to mid nodes (field)

    Inputs

    Outputs

    Configurations

    Scripting

    averaging: elemental mean (field)

    Inputs

    Outputs

    Configurations

    Scripting

    averaging: elemental mean (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    averaging: nodal to elemental (field)

    Inputs

    Outputs

    Configurations

    Scripting

    averaging: nodal to elemental (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    invariant: von mises eqv (field)

    Inputs

    Outputs

    Configurations

    Scripting

    invariant: scalar invariants (field)

    Inputs

    Outputs

    Configurations

    Scripting

    invariant: principal invariants (field)

    Inputs

    Outputs

    Configurations

    Scripting

    invariant: von mises eqv (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    invariant: scalar invariants (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    result: cyclic strain energy

    Inputs

    Outputs

    Configurations

    Scripting

    invariant: principal invariants (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    geo: rotate (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    geo: normals provider nl (nodes or elements)

    Inputs

    Outputs

    Configurations

    Scripting

    geo: elements volumes over time

    Inputs

    Outputs

    Configurations

    Scripting

    geo: elements facets surfaces over time

    Inputs

    Outputs

    Configurations

    Scripting

    scoping: from mesh

    Inputs

    Outputs

    Configurations

    Scripting

    mesh: from scoping

    Inputs

    Outputs

    Configurations

    Scripting

    mesh: split field wrt mesh regions

    Inputs

    Outputs

    Configurations

    Scripting

    mesh: split mesh wrt property

    Inputs

    Outputs

    Configurations

    Scripting

    result: torque

    Inputs

    Outputs

    Configurations

    Scripting

    metadata: cyclic mesh expansion

    Inputs

    Outputs

    Configurations

    Scripting

    result: cyclic analytic stress eqv max

    Inputs

    Outputs

    Configurations

    Scripting

    result: remove rigid body motion (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    result: recombine cyclic harmonic indeces

    Inputs

    Outputs

    Configurations

    Scripting

    mapping: on coordinates

    Inputs

    Outputs

    Configurations

    Scripting

    mapping: solid to skin

    Inputs

    Outputs

    Configurations

    Scripting

    geo: elements volume

    Inputs

    Outputs

    Configurations

    Scripting

    result: nodal averaged thermal swelling strains

    Inputs

    Outputs

    Configurations

    Scripting

    result: poynting vector surface

    Inputs

    Outputs

    Configurations

    Scripting

    result: add rigid body motion (field)

    Inputs

    Outputs

    Configurations

    Scripting

    result: nodal averaged stresses

    Inputs

    Outputs

    Configurations

    Scripting

    result: nodal averaged plastic strains

    Inputs

    Outputs

    Configurations

    Scripting

    result: nodal averaged creep strains

    Inputs

    Outputs

    Configurations

    Scripting

    result: nodal averaged equivalent elastic strain

    Inputs

    Outputs

    Configurations

    Scripting

    result: nodal averaged equivalent creep strain

    Inputs

    Outputs

    Configurations

    Scripting

    result: euler nodes

    Inputs

    Outputs

    Configurations

    Scripting

    result: enf solution to global cs

    Inputs

    Outputs

    Configurations

    Scripting

    result: cms matrices provider

    Inputs

    Outputs

    Configurations

    Scripting

    mesh: skin

    Inputs

    Outputs

    Configurations

    Scripting

    result: smisc

    Inputs

    Outputs

    Configurations

    Scripting

    result: stress solution to global cs

    Inputs

    Outputs

    Configurations

    Scripting

    result: elastic strain solution to global cs

    Inputs

    Outputs

    Configurations

    Scripting

    result: plastic strain to global cs

    Inputs

    Outputs

    Configurations

    Scripting

    result: prns to field

    Inputs

    Outputs

    Configurations

    Scripting

    mesh: mesh cutter

    Inputs

    Outputs

    Configurations

    Scripting

    result: remove rigid body motion (field)

    Inputs

    Outputs

    Configurations

    Scripting

    result: add rigid body motion (fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    result: cyclic expanded displacement

    Inputs

    Outputs

    Configurations

    Scripting

    result: cyclic expanded stress

    Inputs

    Outputs

    Configurations

    Scripting

    result: cyclic volume

    Inputs

    Outputs

    Configurations

    Scripting

    invariant: eigen vectors (on fields container)

    Inputs

    Outputs

    Configurations

    Scripting

    serialization: migrate to vtk

    Inputs

    Outputs

    Configurations

    Scripting

    -