diff --git a/.ci/code_generation.py b/.ci/code_generation.py
index 51b84540f4b..35ed83fe624 100644
--- a/.ci/code_generation.py
+++ b/.ci/code_generation.py
@@ -21,11 +21,14 @@ else:
 LIB_TO_GENERATE = [
     "Ans.Dpf.Native.dll",
+    "Ans.Dpf.Mechanical.dll",
     "Ans.Dpf.FEMutils.dll",
     "meshOperatorsCore.dll",
     "mapdlOperatorsCore.dll",
     "Ans.Dpf.Math.dll",
+    "Ans.Dpf.PythonPluginWrapper.dll",
     "Ans.Dpf.Hdf5.dll",
+    "Ans.Dpf.FlowDiagram.dll",
     "Ans.Dpf.LSDYNAHGP.dll",
     "Ans.Dpf.LivePost.dll",
     "Ans.Dpf.PointCloudSearch.dll",
@@ -38,6 +41,10 @@ for f in files:
     if Path(f).stem == "specification":
         continue
+    if Path(f).name == "build.py":
+        continue
+    if Path(f).name == "operator.mustache":
+        continue
     try:
         if os.path.isdir(f):
             shutil.rmtree(f)
@@ -45,7 +52,7 @@
             os.remove(f)
     except:
         pass
-core.start_local_server()
+core.start_local_server(config=core.AvailableServerConfigs.LegacyGrpcServer)
 code_gen = core.Operator("python_generator")
 code_gen.connect(1, TARGET_PATH)
 for lib in LIB_TO_GENERATE:
@@ -55,6 +62,4 @@ else:
     code_gen.connect(2, True)
     code_gen.run()
-    time.sleep(0.1)
-
-core.SERVER.shutdown()
+    time.sleep(0.1)
\ No newline at end of file
diff --git a/.github/ansys_dpf_gate-0.2.0-py3-none-any.whl b/.github/ansys_dpf_gate-0.2.0-py3-none-any.whl
deleted file mode 100644
index 604617a924f..00000000000
Binary files a/.github/ansys_dpf_gate-0.2.0-py3-none-any.whl and /dev/null differ
diff --git a/.github/ansys_dpf_gatebin-0.2.0-py3-none-manylinux1_x86_64.whl b/.github/ansys_dpf_gatebin-0.2.0-py3-none-manylinux1_x86_64.whl
deleted file mode 100644
index 3fb8f38613e..00000000000
Binary files a/.github/ansys_dpf_gatebin-0.2.0-py3-none-manylinux1_x86_64.whl and /dev/null differ
diff --git a/.github/ansys_dpf_gatebin-0.2.0-py3-none-manylinux_2_17_x86_64.whl b/.github/ansys_dpf_gatebin-0.2.0-py3-none-manylinux_2_17_x86_64.whl
deleted file mode 100644
index 8e7b71fcd05..00000000000
Binary files a/.github/ansys_dpf_gatebin-0.2.0-py3-none-manylinux_2_17_x86_64.whl and /dev/null differ
diff --git a/.github/ansys_grpc_dpf-0.6.0-py3-none-any.whl b/.github/ansys_grpc_dpf-0.6.0-py3-none-any.whl
deleted file mode 100644
index c716218d2ec..00000000000
Binary files a/.github/ansys_grpc_dpf-0.6.0-py3-none-any.whl and /dev/null differ
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 3ff60e9b9ab..d108935f88c 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -19,7 +19,7 @@ env:
   PACKAGE_NAME: ansys-dpf-core
   MODULE: core
   ANSYS_VERSION: 231
-  extra: "--pre --find-links .github/"
+  extra: "--find-links .github/"
 
 jobs:
   style:
@@ -122,75 +122,91 @@ jobs:
 
       - name: "Kill all servers"
        uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1
+        if: always()
 
      - name: "Test API test_launcher"
        shell: bash
        working-directory: test_launcher
        run: |
          pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results2.xml --reruns 2 .
+        if: always()
 
      - name: "Kill all servers"
        uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1
+        if: always()
 
      - name: "Test API test_server"
        shell: bash
        working-directory: test_server
        run: |
          pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results4.xml --reruns 2 .
+ if: always() - name: "Kill all servers" uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() - name: "Test API test_local_server" shell: bash working-directory: test_local_server run: | pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results5.xml --reruns 2 . + if: always() - name: "Kill all servers" uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() - name: "Test API test_multi_server" shell: bash working-directory: test_multi_server run: | pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results6.xml --reruns 2 . + if: always() - name: "Kill all servers" uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() - name: "Test API test_remote_workflow" shell: bash working-directory: test_remote_workflow run: | pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results7.xml --reruns 2 . + if: always() - name: "Kill all servers" uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() - name: "Test API test_remote_operator" shell: bash working-directory: test_remote_operator run: | pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results8.xml --reruns 2 . + if: always() - name: "Kill all servers" uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() - name: "Test API test_workflow" shell: bash working-directory: test_workflow run: | pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results3.xml --reruns 3 . + if: always() - name: "Kill all servers" uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() - name: "Upload Test Results" uses: actions/upload-artifact@v2 with: name: ${{ env.PACKAGE_NAME }}_${{ matrix.python-version }}_${{ matrix.os }}_pytest_${{ env.ANSYS_VERSION }} path: tests/junit/test-results.xml + if: always() - name: "Upload coverage to Codecov" uses: codecov/codecov-action@v3 @@ -320,6 +336,7 @@ jobs: shell: bash run: | pip uninstall -y ansys-dpf-gatebin + if: always() - name: "Check sanity without gatebin INPROCESS" shell: bash @@ -328,6 +345,7 @@ jobs: python run_non_regression_examples.py env: DPF_SERVER_TYPE: INPROCESS + if: always() - name: "Kill all servers" uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 @@ -340,6 +358,7 @@ jobs: python run_non_regression_examples.py env: DPF_SERVER_TYPE: GRPC + if: always() - name: "Kill all servers" uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 @@ -352,6 +371,7 @@ jobs: python run_non_regression_examples.py env: DPF_SERVER_TYPE: LEGACYGRPC + if: always() - name: "Kill all servers" uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 @@ -366,7 +386,7 @@ jobs: matrix: python-version: ["3.8"] os: ["windows-latest", "ubuntu-latest"] - ANSYS_VERSION: ["221"] + ANSYS_VERSION: ["222", "221"] steps: - uses: actions/checkout@v3 @@ -433,75 +453,91 @@ jobs: - name: "Kill all servers" uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() - name: "Test API test_launcher" shell: bash working-directory: test_launcher run: | pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results2.xml --reruns 2 . 
+ if: always() - name: "Kill all servers" uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() - name: "Test API test_server" shell: bash working-directory: test_server run: | pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results4.xml --reruns 2 . + if: always() - name: "Kill all servers" uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() - name: "Test API test_local_server" shell: bash working-directory: test_local_server run: | pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results5.xml --reruns 2 . + if: always() - name: "Kill all servers" uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() - name: "Test API test_multi_server" shell: bash working-directory: test_multi_server run: | pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results6.xml --reruns 2 . + if: always() - name: "Kill all servers" uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() - name: "Test API test_remote_workflow" shell: bash working-directory: test_remote_workflow run: | pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results7.xml --reruns 2 . + if: always() - name: "Kill all servers" uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() - name: "Test API test_remote_operator" shell: bash working-directory: test_remote_operator run: | pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results8.xml --reruns 2 . + if: always() - name: "Kill all servers" uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() - name: "Test API test_workflow" shell: bash working-directory: test_workflow run: | pytest $DEBUG --cov=ansys.dpf.core --cov-report=xml --cov-report=html --cov-append --log-level=ERROR --junitxml=../tests/junit/test-results3.xml --reruns 3 . + if: always() - name: "Kill all servers" uses: pyansys/pydpf-actions/kill-dpf-servers@v2.2.dev1 + if: always() - name: "Upload Test Results" uses: actions/upload-artifact@v2 with: name: ${{ env.PACKAGE_NAME }}_${{ matrix.python-version }}_${{ matrix.os }}_pytest_${{ matrix.ANSYS_VERSION }} path: tests/junit/test-results.xml + if: always() - name: "Upload coverage to Codecov" uses: codecov/codecov-action@v3 diff --git a/ansys/dpf/core/label_space.py b/ansys/dpf/core/label_space.py index 3c4e5c6b166..d97db6d0d2a 100644 --- a/ansys/dpf/core/label_space.py +++ b/ansys/dpf/core/label_space.py @@ -43,8 +43,8 @@ def _data_processing_core_api(self): return core_api def fill(self, label_space): - for key, id in label_space.items(): - self._api.label_space_add_data(self, key, id) + for key, index in label_space.items(): + self._api.label_space_add_data(self, key, index) def __dict__(self): if isinstance(self._internal_obj, dict): diff --git a/ansys/dpf/core/operators/__init__.py b/ansys/dpf/core/operators/__init__.py index 9e37b741aa2..526318cbdc6 100644 --- a/ansys/dpf/core/operators/__init__.py +++ b/ansys/dpf/core/operators/__init__.py @@ -1,20 +1,13 @@ -""" -.. _ref_operators_package: - -Operators ---------- -""" - -from . import result from . import math +from . import result from . import utility from . import min_max from . import scoping -from . 
import metadata -from . import logic -from . import mesh from . import filter +from . import logic +from . import metadata from . import serialization +from . import mesh from . import geo from . import averaging from . import invariant diff --git a/ansys/dpf/core/operators/averaging/__init__.py b/ansys/dpf/core/operators/averaging/__init__.py index 5c262a6cc66..6f8ee05aef7 100644 --- a/ansys/dpf/core/operators/averaging/__init__.py +++ b/ansys/dpf/core/operators/averaging/__init__.py @@ -13,6 +13,7 @@ from .elemental_fraction_fc import elemental_fraction_fc from .to_nodal import to_nodal from .to_nodal_fc import to_nodal_fc +from .nodal_extend_to_mid_nodes import nodal_extend_to_mid_nodes from .elemental_nodal_to_nodal_elemental import elemental_nodal_to_nodal_elemental from .extend_to_mid_nodes import extend_to_mid_nodes from .extend_to_mid_nodes_fc import extend_to_mid_nodes_fc diff --git a/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal.py b/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal.py index 970909503e0..34154f68624 100644 --- a/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal.py +++ b/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal.py @@ -25,6 +25,9 @@ class elemental_nodal_to_nodal(Operator): Each nodal value is divided by the number of elements linked to this node (default is true for discrete quantities) + extend_to_mid_nodes : bool, optional + Compute mid nodes (when available) by + averaging neighbour primary nodes mesh : MeshedRegion, optional @@ -42,6 +45,8 @@ class elemental_nodal_to_nodal(Operator): >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) >>> my_should_average = bool() >>> op.inputs.should_average.connect(my_should_average) + >>> my_extend_to_mid_nodes = bool() + >>> op.inputs.extend_to_mid_nodes.connect(my_extend_to_mid_nodes) >>> my_mesh = dpf.MeshedRegion() >>> op.inputs.mesh.connect(my_mesh) @@ -50,11 +55,13 @@ class elemental_nodal_to_nodal(Operator): ... field=my_field, ... mesh_scoping=my_mesh_scoping, ... should_average=my_should_average, + ... extend_to_mid_nodes=my_extend_to_mid_nodes, ... mesh=my_mesh, ... ) >>> # Get output data >>> result_field = op.outputs.field() + >>> result_weight = op.outputs.weight() """ def __init__( @@ -62,6 +69,7 @@ def __init__( field=None, mesh_scoping=None, should_average=None, + extend_to_mid_nodes=None, mesh=None, config=None, server=None, @@ -75,6 +83,8 @@ def __init__( self.inputs.mesh_scoping.connect(mesh_scoping) if should_average is not None: self.inputs.should_average.connect(should_average) + if extend_to_mid_nodes is not None: + self.inputs.extend_to_mid_nodes.connect(extend_to_mid_nodes) if mesh is not None: self.inputs.mesh.connect(mesh) @@ -105,6 +115,13 @@ def _spec(): document="""Each nodal value is divided by the number of elements linked to this node (default is true for discrete quantities)""", + ), + 4: PinSpecification( + name="extend_to_mid_nodes", + type_names=["bool"], + optional=True, + document="""Compute mid nodes (when available) by + averaging neighbour primary nodes""", ), 7: PinSpecification( name="mesh", @@ -120,6 +137,14 @@ def _spec(): optional=False, document="""""", ), + 1: PinSpecification( + name="weight", + type_names=["property_field"], + optional=False, + document="""Gives for each node, the number of times it + was found in the elemental nodal + field. 
can be used to average later.""", + ), }, ) return spec @@ -175,6 +200,8 @@ class InputsElementalNodalToNodal(_Inputs): >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) >>> my_should_average = bool() >>> op.inputs.should_average.connect(my_should_average) + >>> my_extend_to_mid_nodes = bool() + >>> op.inputs.extend_to_mid_nodes.connect(my_extend_to_mid_nodes) >>> my_mesh = dpf.MeshedRegion() >>> op.inputs.mesh.connect(my_mesh) """ @@ -191,6 +218,10 @@ def __init__(self, op: Operator): elemental_nodal_to_nodal._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._should_average) + self._extend_to_mid_nodes = Input( + elemental_nodal_to_nodal._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._extend_to_mid_nodes) self._mesh = Input(elemental_nodal_to_nodal._spec().input_pin(7), 7, op, -1) self._inputs.append(self._mesh) @@ -257,6 +288,27 @@ def should_average(self): """ return self._should_average + @property + def extend_to_mid_nodes(self): + """Allows to connect extend_to_mid_nodes input to the operator. + + Compute mid nodes (when available) by + averaging neighbour primary nodes + + Parameters + ---------- + my_extend_to_mid_nodes : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_nodal_to_nodal() + >>> op.inputs.extend_to_mid_nodes.connect(my_extend_to_mid_nodes) + >>> # or + >>> op.inputs.extend_to_mid_nodes(my_extend_to_mid_nodes) + """ + return self._extend_to_mid_nodes + @property def mesh(self): """Allows to connect mesh input to the operator. @@ -286,12 +338,15 @@ class OutputsElementalNodalToNodal(_Outputs): >>> op = dpf.operators.averaging.elemental_nodal_to_nodal() >>> # Connect inputs : op.inputs. ... >>> result_field = op.outputs.field() + >>> result_weight = op.outputs.weight() """ def __init__(self, op: Operator): super().__init__(elemental_nodal_to_nodal._spec().outputs, op) self._field = Output(elemental_nodal_to_nodal._spec().output_pin(0), 0, op) self._outputs.append(self._field) + self._weight = Output(elemental_nodal_to_nodal._spec().output_pin(1), 1, op) + self._outputs.append(self._weight) @property def field(self): @@ -309,3 +364,20 @@ def field(self): >>> result_field = op.outputs.field() """ # noqa: E501 return self._field + + @property + def weight(self): + """Allows to get weight output of the operator + + Returns + ---------- + my_weight : PropertyField + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_nodal_to_nodal() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_weight = op.outputs.weight() + """ # noqa: E501 + return self._weight diff --git a/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_fc.py b/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_fc.py index ccd63a6465a..67a4c2f7ba2 100644 --- a/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_fc.py +++ b/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_fc.py @@ -31,6 +31,9 @@ class elemental_nodal_to_nodal_fc(Operator): Average only on these nodes, if it is scoping container, the label must correspond to the one of the fields container + extend_to_mid_nodes : bool, optional + Compute mid nodes (when available) by + averaging neighbour primary nodes Examples @@ -49,6 +52,8 @@ class elemental_nodal_to_nodal_fc(Operator): >>> op.inputs.should_average.connect(my_should_average) >>> my_scoping = dpf.Scoping() >>> op.inputs.scoping.connect(my_scoping) + >>> my_extend_to_mid_nodes = bool() + >>> op.inputs.extend_to_mid_nodes.connect(my_extend_to_mid_nodes) >>> # Instantiate operator and connect inputs in one line >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_fc( @@ -56,10 +61,12 @@ class elemental_nodal_to_nodal_fc(Operator): ... mesh=my_mesh, ... should_average=my_should_average, ... scoping=my_scoping, + ... extend_to_mid_nodes=my_extend_to_mid_nodes, ... ) >>> # Get output data >>> result_fields_container = op.outputs.fields_container() + >>> result_weights = op.outputs.weights() """ def __init__( @@ -68,6 +75,7 @@ def __init__( mesh=None, should_average=None, scoping=None, + extend_to_mid_nodes=None, config=None, server=None, ): @@ -84,6 +92,8 @@ def __init__( self.inputs.should_average.connect(should_average) if scoping is not None: self.inputs.scoping.connect(scoping) + if extend_to_mid_nodes is not None: + self.inputs.extend_to_mid_nodes.connect(extend_to_mid_nodes) @staticmethod def _spec(): @@ -125,6 +135,13 @@ def _spec(): container, the label must correspond to the one of the fields container""", ), + 4: PinSpecification( + name="extend_to_mid_nodes", + type_names=["bool"], + optional=True, + document="""Compute mid nodes (when available) by + averaging neighbour primary nodes""", + ), }, map_output_pin_spec={ 0: PinSpecification( @@ -133,6 +150,16 @@ def _spec(): optional=False, document="""""", ), + 1: PinSpecification( + name="weights", + type_names=[ + "class dataProcessing::DpfTypeCollection" + ], + optional=False, + document="""Gives for each node, the number of times it + was found in the elemental nodal + field. can be used to average later.""", + ), }, ) return spec @@ -192,6 +219,8 @@ class InputsElementalNodalToNodalFc(_Inputs): >>> op.inputs.should_average.connect(my_should_average) >>> my_scoping = dpf.Scoping() >>> op.inputs.scoping.connect(my_scoping) + >>> my_extend_to_mid_nodes = bool() + >>> op.inputs.extend_to_mid_nodes.connect(my_extend_to_mid_nodes) """ def __init__(self, op: Operator): @@ -210,6 +239,10 @@ def __init__(self, op: Operator): elemental_nodal_to_nodal_fc._spec().input_pin(3), 3, op, -1 ) self._inputs.append(self._scoping) + self._extend_to_mid_nodes = Input( + elemental_nodal_to_nodal_fc._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._extend_to_mid_nodes) @property def fields_container(self): @@ -295,6 +328,27 @@ def scoping(self): """ return self._scoping + @property + def extend_to_mid_nodes(self): + """Allows to connect extend_to_mid_nodes input to the operator. 
+ + Compute mid nodes (when available) by + averaging neighbour primary nodes + + Parameters + ---------- + my_extend_to_mid_nodes : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_fc() + >>> op.inputs.extend_to_mid_nodes.connect(my_extend_to_mid_nodes) + >>> # or + >>> op.inputs.extend_to_mid_nodes(my_extend_to_mid_nodes) + """ + return self._extend_to_mid_nodes + class OutputsElementalNodalToNodalFc(_Outputs): """Intermediate class used to get outputs from @@ -306,6 +360,7 @@ class OutputsElementalNodalToNodalFc(_Outputs): >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_fc() >>> # Connect inputs : op.inputs. ... >>> result_fields_container = op.outputs.fields_container() + >>> result_weights = op.outputs.weights() """ def __init__(self, op: Operator): @@ -314,6 +369,8 @@ def __init__(self, op: Operator): elemental_nodal_to_nodal_fc._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) + self._weights = Output(elemental_nodal_to_nodal_fc._spec().output_pin(1), 1, op) + self._outputs.append(self._weights) @property def fields_container(self): @@ -331,3 +388,21 @@ def fields_container(self): >>> result_fields_container = op.outputs.fields_container() """ # noqa: E501 return self._fields_container + + @property + def weights(self): + """Allows to get weights output of the operator + + Returns + ---------- + my_weights : Class Dataprocessing::Dpftypecollection<Class + Dataprocessing::Cpropertyfield> + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.averaging.elemental_nodal_to_nodal_fc() + >>> # Connect inputs : op.inputs. ... + >>> result_weights = op.outputs.weights() + """ # noqa: E501 + return self._weights diff --git a/ansys/dpf/core/operators/averaging/extend_to_mid_nodes.py b/ansys/dpf/core/operators/averaging/extend_to_mid_nodes.py index f300c024712..52e2dbd02cc 100644 --- a/ansys/dpf/core/operators/averaging/extend_to_mid_nodes.py +++ b/ansys/dpf/core/operators/averaging/extend_to_mid_nodes.py @@ -11,8 +11,8 @@ class extend_to_mid_nodes(Operator): - """Extends ElementalNodal field defined on corner nodes to a - ElementalNodal field defined also on the mid nodes. + """Extends an ElementalNodal or Nodal field defined on corner nodes to a + field defined also on the mid nodes. Parameters ---------- @@ -56,8 +56,8 @@ def __init__(self, field=None, mesh=None, config=None, server=None): @staticmethod def _spec(): - description = """Extends ElementalNodal field defined on corner nodes to a - ElementalNodal field defined also on the mid nodes.""" + description = """Extends an ElementalNodal or Nodal field defined on corner nodes to a + field defined also on the mid nodes.""" spec = Specification( description=description, map_input_pin_spec={ diff --git a/ansys/dpf/core/operators/averaging/extend_to_mid_nodes_fc.py b/ansys/dpf/core/operators/averaging/extend_to_mid_nodes_fc.py index 14906580ddb..3c2869cf437 100644 --- a/ansys/dpf/core/operators/averaging/extend_to_mid_nodes_fc.py +++ b/ansys/dpf/core/operators/averaging/extend_to_mid_nodes_fc.py @@ -11,7 +11,7 @@ class extend_to_mid_nodes_fc(Operator): - """Extends ElementalNodal fields defined on corner nodes to + """Extends ElementalNodal or Nodal fields defined on corner nodes to ElementalNodal fields defined also on the mid nodes. 
Parameters @@ -57,7 +57,7 @@ def __init__(self, fields_container=None, mesh=None, config=None, server=None): @staticmethod def _spec(): - description = """Extends ElementalNodal fields defined on corner nodes to + description = """Extends ElementalNodal or Nodal fields defined on corner nodes to ElementalNodal fields defined also on the mid nodes.""" spec = Specification( description=description, diff --git a/ansys/dpf/core/operators/geo/moment_of_inertia.py b/ansys/dpf/core/operators/averaging/nodal_extend_to_mid_nodes.py similarity index 52% rename from ansys/dpf/core/operators/geo/moment_of_inertia.py rename to ansys/dpf/core/operators/averaging/nodal_extend_to_mid_nodes.py index f061ef7c1ce..5d292afe201 100644 --- a/ansys/dpf/core/operators/geo/moment_of_inertia.py +++ b/ansys/dpf/core/operators/averaging/nodal_extend_to_mid_nodes.py @@ -1,6 +1,6 @@ """ -moment_of_inertia -================= +nodal_extend_to_mid_nodes +========================= Autogenerated DPF operator classes. """ from warnings import warn @@ -10,21 +10,21 @@ from ansys.dpf.core.operators.specification import PinSpecification, Specification -class moment_of_inertia(Operator): - """Compute the inertia tensor of a set of elements. +class nodal_extend_to_mid_nodes(Operator): + """Extends a Nodal field defined on corner nodes to a field defined also + on mid nodes. Parameters ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + node_scoping : Scoping, optional + Nodes expected in output. + element_scoping : Scoping, optional + Precomputed list of elements of interest + (improves performances). mesh : MeshedRegion, optional - mesh_scoping : Scoping, optional - Mesh scoping, if not set, all the elements of - the mesh are considered. - field : Field, optional - Elemental or nodal ponderation used in - computation. - boolean : bool, optional - Default true, compute inertia tensor at - center of gravity. Examples @@ -32,24 +32,24 @@ class moment_of_inertia(Operator): >>> from ansys.dpf import core as dpf >>> # Instantiate operator - >>> op = dpf.operators.geo.moment_of_inertia() + >>> op = dpf.operators.averaging.nodal_extend_to_mid_nodes() >>> # Make input connections - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) >>> my_field = dpf.Field() >>> op.inputs.field.connect(my_field) - >>> my_boolean = bool() - >>> op.inputs.boolean.connect(my_boolean) + >>> my_node_scoping = dpf.Scoping() + >>> op.inputs.node_scoping.connect(my_node_scoping) + >>> my_element_scoping = dpf.Scoping() + >>> op.inputs.element_scoping.connect(my_element_scoping) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.geo.moment_of_inertia( - ... mesh=my_mesh, - ... mesh_scoping=my_mesh_scoping, + >>> op = dpf.operators.averaging.nodal_extend_to_mid_nodes( ... field=my_field, - ... boolean=my_boolean, + ... node_scoping=my_node_scoping, + ... element_scoping=my_element_scoping, + ... mesh=my_mesh, ... 
) >>> # Get output data @@ -58,59 +58,57 @@ class moment_of_inertia(Operator): def __init__( self, - mesh=None, - mesh_scoping=None, field=None, - boolean=None, + node_scoping=None, + element_scoping=None, + mesh=None, config=None, server=None, ): - super().__init__( - name="topology::moment_of_inertia", config=config, server=server - ) - self._inputs = InputsMomentOfInertia(self) - self._outputs = OutputsMomentOfInertia(self) - if mesh is not None: - self.inputs.mesh.connect(mesh) - if mesh_scoping is not None: - self.inputs.mesh_scoping.connect(mesh_scoping) + super().__init__(name="nodal_extend_to_mid_nodes", config=config, server=server) + self._inputs = InputsNodalExtendToMidNodes(self) + self._outputs = OutputsNodalExtendToMidNodes(self) if field is not None: self.inputs.field.connect(field) - if boolean is not None: - self.inputs.boolean.connect(boolean) + if node_scoping is not None: + self.inputs.node_scoping.connect(node_scoping) + if element_scoping is not None: + self.inputs.element_scoping.connect(element_scoping) + if mesh is not None: + self.inputs.mesh.connect(mesh) @staticmethod def _spec(): - description = """Compute the inertia tensor of a set of elements.""" + description = """Extends a Nodal field defined on corner nodes to a field defined also + on mid nodes.""" spec = Specification( description=description, map_input_pin_spec={ 0: PinSpecification( - name="mesh", - type_names=["abstract_meshed_region"], - optional=True, - document="""""", + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", ), 1: PinSpecification( - name="mesh_scoping", + name="node_scoping", type_names=["scoping"], optional=True, - document="""Mesh scoping, if not set, all the elements of - the mesh are considered.""", + document="""Nodes expected in output.""", ), - 2: PinSpecification( - name="field", - type_names=["field"], + 5: PinSpecification( + name="element_scoping", + type_names=["scoping"], optional=True, - document="""Elemental or nodal ponderation used in - computation.""", + document="""Precomputed list of elements of interest + (improves performances).""", ), - 3: PinSpecification( - name="boolean", - type_names=["bool"], + 7: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region"], optional=True, - document="""Default true, compute inertia tensor at - center of gravity.""", + document="""""", ), }, map_output_pin_spec={ @@ -138,9 +136,7 @@ def default_config(server=None): Server with channel connected to the remote or local instance. When ``None``, attempts to use the global server. """ - return Operator.default_config( - name="topology::moment_of_inertia", server=server - ) + return Operator.default_config(name="nodal_extend_to_mid_nodes", server=server) @property def inputs(self): @@ -148,7 +144,7 @@ def inputs(self): Returns -------- - inputs : InputsMomentOfInertia + inputs : InputsNodalExtendToMidNodes """ return super().inputs @@ -158,137 +154,140 @@ def outputs(self): Returns -------- - outputs : OutputsMomentOfInertia + outputs : OutputsNodalExtendToMidNodes """ return super().outputs -class InputsMomentOfInertia(_Inputs): +class InputsNodalExtendToMidNodes(_Inputs): """Intermediate class used to connect user inputs to - moment_of_inertia operator. + nodal_extend_to_mid_nodes operator. 
Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.moment_of_inertia() - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> op = dpf.operators.averaging.nodal_extend_to_mid_nodes() >>> my_field = dpf.Field() >>> op.inputs.field.connect(my_field) - >>> my_boolean = bool() - >>> op.inputs.boolean.connect(my_boolean) + >>> my_node_scoping = dpf.Scoping() + >>> op.inputs.node_scoping.connect(my_node_scoping) + >>> my_element_scoping = dpf.Scoping() + >>> op.inputs.element_scoping.connect(my_element_scoping) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) """ def __init__(self, op: Operator): - super().__init__(moment_of_inertia._spec().inputs, op) - self._mesh = Input(moment_of_inertia._spec().input_pin(0), 0, op, -1) - self._inputs.append(self._mesh) - self._mesh_scoping = Input(moment_of_inertia._spec().input_pin(1), 1, op, -1) - self._inputs.append(self._mesh_scoping) - self._field = Input(moment_of_inertia._spec().input_pin(2), 2, op, -1) + super().__init__(nodal_extend_to_mid_nodes._spec().inputs, op) + self._field = Input(nodal_extend_to_mid_nodes._spec().input_pin(0), 0, op, -1) self._inputs.append(self._field) - self._boolean = Input(moment_of_inertia._spec().input_pin(3), 3, op, -1) - self._inputs.append(self._boolean) + self._node_scoping = Input( + nodal_extend_to_mid_nodes._spec().input_pin(1), 1, op, -1 + ) + self._inputs.append(self._node_scoping) + self._element_scoping = Input( + nodal_extend_to_mid_nodes._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._element_scoping) + self._mesh = Input(nodal_extend_to_mid_nodes._spec().input_pin(7), 7, op, -1) + self._inputs.append(self._mesh) @property - def mesh(self): - """Allows to connect mesh input to the operator. + def field(self): + """Allows to connect field input to the operator. + + Field or fields container with only one field + is expected Parameters ---------- - my_mesh : MeshedRegion + my_field : Field or FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.moment_of_inertia() - >>> op.inputs.mesh.connect(my_mesh) + >>> op = dpf.operators.averaging.nodal_extend_to_mid_nodes() + >>> op.inputs.field.connect(my_field) >>> # or - >>> op.inputs.mesh(my_mesh) + >>> op.inputs.field(my_field) """ - return self._mesh + return self._field @property - def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator. + def node_scoping(self): + """Allows to connect node_scoping input to the operator. - Mesh scoping, if not set, all the elements of - the mesh are considered. + Nodes expected in output. Parameters ---------- - my_mesh_scoping : Scoping + my_node_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.moment_of_inertia() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> op = dpf.operators.averaging.nodal_extend_to_mid_nodes() + >>> op.inputs.node_scoping.connect(my_node_scoping) >>> # or - >>> op.inputs.mesh_scoping(my_mesh_scoping) + >>> op.inputs.node_scoping(my_node_scoping) """ - return self._mesh_scoping + return self._node_scoping @property - def field(self): - """Allows to connect field input to the operator. + def element_scoping(self): + """Allows to connect element_scoping input to the operator. - Elemental or nodal ponderation used in - computation. 
+ Precomputed list of elements of interest + (improves performances). Parameters ---------- - my_field : Field + my_element_scoping : Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.moment_of_inertia() - >>> op.inputs.field.connect(my_field) + >>> op = dpf.operators.averaging.nodal_extend_to_mid_nodes() + >>> op.inputs.element_scoping.connect(my_element_scoping) >>> # or - >>> op.inputs.field(my_field) + >>> op.inputs.element_scoping(my_element_scoping) """ - return self._field + return self._element_scoping @property - def boolean(self): - """Allows to connect boolean input to the operator. - - Default true, compute inertia tensor at - center of gravity. + def mesh(self): + """Allows to connect mesh input to the operator. Parameters ---------- - my_boolean : bool + my_mesh : MeshedRegion Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.moment_of_inertia() - >>> op.inputs.boolean.connect(my_boolean) + >>> op = dpf.operators.averaging.nodal_extend_to_mid_nodes() + >>> op.inputs.mesh.connect(my_mesh) >>> # or - >>> op.inputs.boolean(my_boolean) + >>> op.inputs.mesh(my_mesh) """ - return self._boolean + return self._mesh -class OutputsMomentOfInertia(_Outputs): +class OutputsNodalExtendToMidNodes(_Outputs): """Intermediate class used to get outputs from - moment_of_inertia operator. + nodal_extend_to_mid_nodes operator. Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.moment_of_inertia() + >>> op = dpf.operators.averaging.nodal_extend_to_mid_nodes() >>> # Connect inputs : op.inputs. ... >>> result_field = op.outputs.field() """ def __init__(self, op: Operator): - super().__init__(moment_of_inertia._spec().outputs, op) - self._field = Output(moment_of_inertia._spec().output_pin(0), 0, op) + super().__init__(nodal_extend_to_mid_nodes._spec().outputs, op) + self._field = Output(nodal_extend_to_mid_nodes._spec().output_pin(0), 0, op) self._outputs.append(self._field) @property @@ -302,7 +301,7 @@ def field(self): Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.moment_of_inertia() + >>> op = dpf.operators.averaging.nodal_extend_to_mid_nodes() >>> # Connect inputs : op.inputs. ... 
>>> result_field = op.outputs.field() """ # noqa: E501 diff --git a/ansys/dpf/core/operators/build.py b/ansys/dpf/core/operators/build.py index 9831e255f8b..ef2e335e136 100644 --- a/ansys/dpf/core/operators/build.py +++ b/ansys/dpf/core/operators/build.py @@ -13,6 +13,8 @@ from ansys.dpf.core.mapping_types import map_types_to_python +dpf.start_local_server(config=dpf.AvailableServerConfigs.LegacyGrpcServer) + def build_docstring(specification): """Used to generate class docstrings.""" docstring = "" diff --git a/ansys/dpf/core/operators/filter/__init__.py b/ansys/dpf/core/operators/filter/__init__.py index e8e83f04947..22f3a46736c 100644 --- a/ansys/dpf/core/operators/filter/__init__.py +++ b/ansys/dpf/core/operators/filter/__init__.py @@ -1,3 +1,4 @@ +from .field_signed_high_pass import field_signed_high_pass from .field_band_pass_fc import field_band_pass_fc from .scoping_low_pass import scoping_low_pass from .field_high_pass import field_high_pass @@ -7,3 +8,4 @@ from .field_low_pass_fc import field_low_pass_fc from .field_band_pass import field_band_pass from .scoping_band_pass import scoping_band_pass +from .signed_scoping_high_pass import signed_scoping_high_pass diff --git a/ansys/dpf/core/operators/filter/field_band_pass.py b/ansys/dpf/core/operators/filter/field_band_pass.py index adb5bf05531..89971ec1d11 100644 --- a/ansys/dpf/core/operators/filter/field_band_pass.py +++ b/ansys/dpf/core/operators/filter/field_band_pass.py @@ -23,7 +23,7 @@ class field_band_pass(Operator): min_threshold : float or Field A min threshold scalar or a field containing one value is expected - max_threshold : float or Field + max_threshold : float or Field, optional A max threshold scalar or a field containing one value is expected @@ -97,7 +97,7 @@ def _spec(): 2: PinSpecification( name="max_threshold", type_names=["double", "field"], - optional=False, + optional=True, document="""A max threshold scalar or a field containing one value is expected""", ), diff --git a/ansys/dpf/core/operators/filter/field_band_pass_fc.py b/ansys/dpf/core/operators/filter/field_band_pass_fc.py index f24e69acf0e..a51fa793e58 100644 --- a/ansys/dpf/core/operators/filter/field_band_pass_fc.py +++ b/ansys/dpf/core/operators/filter/field_band_pass_fc.py @@ -23,7 +23,7 @@ class field_band_pass_fc(Operator): min_threshold : float or Field A min threshold scalar or a field containing one value is expected - max_threshold : float or Field + max_threshold : float or Field, optional A max threshold scalar or a field containing one value is expected @@ -97,7 +97,7 @@ def _spec(): 2: PinSpecification( name="max_threshold", type_names=["double", "field"], - optional=False, + optional=True, document="""A max threshold scalar or a field containing one value is expected""", ), diff --git a/ansys/dpf/core/operators/filter/field_high_pass.py b/ansys/dpf/core/operators/filter/field_high_pass.py index fb41c242d60..64f85b6f19b 100644 --- a/ansys/dpf/core/operators/filter/field_high_pass.py +++ b/ansys/dpf/core/operators/filter/field_high_pass.py @@ -22,6 +22,11 @@ class field_high_pass(Operator): threshold : float or Field A threshold scalar or a field containing one value is expected + both : bool, optional + Bool(optional, default false) if set to true, + the complement of the filtered fields + container is returned on output pin + #1 Examples @@ -36,18 +41,21 @@ class field_high_pass(Operator): >>> op.inputs.field.connect(my_field) >>> my_threshold = float() >>> op.inputs.threshold.connect(my_threshold) + >>> my_both = bool() + >>> 
op.inputs.both.connect(my_both) >>> # Instantiate operator and connect inputs in one line >>> op = dpf.operators.filter.field_high_pass( ... field=my_field, ... threshold=my_threshold, + ... both=my_both, ... ) >>> # Get output data >>> result_field = op.outputs.field() """ - def __init__(self, field=None, threshold=None, config=None, server=None): + def __init__(self, field=None, threshold=None, both=None, config=None, server=None): super().__init__(name="core::field::high_pass", config=config, server=server) self._inputs = InputsFieldHighPass(self) self._outputs = OutputsFieldHighPass(self) @@ -55,6 +63,8 @@ def __init__(self, field=None, threshold=None, config=None, server=None): self.inputs.field.connect(field) if threshold is not None: self.inputs.threshold.connect(threshold) + if both is not None: + self.inputs.both.connect(both) @staticmethod def _spec(): @@ -77,6 +87,15 @@ def _spec(): document="""A threshold scalar or a field containing one value is expected""", ), + 2: PinSpecification( + name="both", + type_names=["bool"], + optional=True, + document="""Bool(optional, default false) if set to true, + the complement of the filtered fields + container is returned on output pin + #1""", + ), }, map_output_pin_spec={ 0: PinSpecification( @@ -138,6 +157,8 @@ class InputsFieldHighPass(_Inputs): >>> op.inputs.field.connect(my_field) >>> my_threshold = float() >>> op.inputs.threshold.connect(my_threshold) + >>> my_both = bool() + >>> op.inputs.both.connect(my_both) """ def __init__(self, op: Operator): @@ -146,6 +167,8 @@ def __init__(self, op: Operator): self._inputs.append(self._field) self._threshold = Input(field_high_pass._spec().input_pin(1), 1, op, -1) self._inputs.append(self._threshold) + self._both = Input(field_high_pass._spec().input_pin(2), 2, op, -1) + self._inputs.append(self._both) @property def field(self): @@ -189,6 +212,29 @@ def threshold(self): """ return self._threshold + @property + def both(self): + """Allows to connect both input to the operator. + + Bool(optional, default false) if set to true, + the complement of the filtered fields + container is returned on output pin + #1 + + Parameters + ---------- + my_both : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.field_high_pass() + >>> op.inputs.both.connect(my_both) + >>> # or + >>> op.inputs.both(my_both) + """ + return self._both + class OutputsFieldHighPass(_Outputs): """Intermediate class used to get outputs from diff --git a/ansys/dpf/core/operators/filter/field_high_pass_fc.py b/ansys/dpf/core/operators/filter/field_high_pass_fc.py index 1b570a510d1..bd8f74ae6ab 100644 --- a/ansys/dpf/core/operators/filter/field_high_pass_fc.py +++ b/ansys/dpf/core/operators/filter/field_high_pass_fc.py @@ -22,6 +22,11 @@ class field_high_pass_fc(Operator): threshold : float or Field A threshold scalar or a field containing one value is expected + both : bool, optional + Bool(optional, default false) if set to true, + the complement of the filtered fields + container is returned on output pin + #1 Examples @@ -36,18 +41,23 @@ class field_high_pass_fc(Operator): >>> op.inputs.fields_container.connect(my_fields_container) >>> my_threshold = float() >>> op.inputs.threshold.connect(my_threshold) + >>> my_both = bool() + >>> op.inputs.both.connect(my_both) >>> # Instantiate operator and connect inputs in one line >>> op = dpf.operators.filter.field_high_pass_fc( ... fields_container=my_fields_container, ... threshold=my_threshold, + ... both=my_both, ... 
) >>> # Get output data >>> result_fields_container = op.outputs.fields_container() """ - def __init__(self, fields_container=None, threshold=None, config=None, server=None): + def __init__( + self, fields_container=None, threshold=None, both=None, config=None, server=None + ): super().__init__(name="core::field::high_pass_fc", config=config, server=server) self._inputs = InputsFieldHighPassFc(self) self._outputs = OutputsFieldHighPassFc(self) @@ -55,6 +65,8 @@ def __init__(self, fields_container=None, threshold=None, config=None, server=No self.inputs.fields_container.connect(fields_container) if threshold is not None: self.inputs.threshold.connect(threshold) + if both is not None: + self.inputs.both.connect(both) @staticmethod def _spec(): @@ -77,6 +89,15 @@ def _spec(): document="""A threshold scalar or a field containing one value is expected""", ), + 2: PinSpecification( + name="both", + type_names=["bool"], + optional=True, + document="""Bool(optional, default false) if set to true, + the complement of the filtered fields + container is returned on output pin + #1""", + ), }, map_output_pin_spec={ 0: PinSpecification( @@ -138,6 +159,8 @@ class InputsFieldHighPassFc(_Inputs): >>> op.inputs.fields_container.connect(my_fields_container) >>> my_threshold = float() >>> op.inputs.threshold.connect(my_threshold) + >>> my_both = bool() + >>> op.inputs.both.connect(my_both) """ def __init__(self, op: Operator): @@ -148,6 +171,8 @@ def __init__(self, op: Operator): self._inputs.append(self._fields_container) self._threshold = Input(field_high_pass_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._threshold) + self._both = Input(field_high_pass_fc._spec().input_pin(2), 2, op, -1) + self._inputs.append(self._both) @property def fields_container(self): @@ -191,6 +216,29 @@ def threshold(self): """ return self._threshold + @property + def both(self): + """Allows to connect both input to the operator. + + Bool(optional, default false) if set to true, + the complement of the filtered fields + container is returned on output pin + #1 + + Parameters + ---------- + my_both : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.field_high_pass_fc() + >>> op.inputs.both.connect(my_both) + >>> # or + >>> op.inputs.both(my_both) + """ + return self._both + class OutputsFieldHighPassFc(_Outputs): """Intermediate class used to get outputs from diff --git a/ansys/dpf/core/operators/filter/field_low_pass.py b/ansys/dpf/core/operators/filter/field_low_pass.py index f86db5eb081..d12d3510317 100644 --- a/ansys/dpf/core/operators/filter/field_low_pass.py +++ b/ansys/dpf/core/operators/filter/field_low_pass.py @@ -22,6 +22,11 @@ class field_low_pass(Operator): threshold : float or Field A threshold scalar or a field containing one value is expected + both : bool, optional + Bool(optional, default false) if set to true, + the complement of the filtered fields + container is returned on output pin + #1 Examples @@ -36,18 +41,21 @@ class field_low_pass(Operator): >>> op.inputs.field.connect(my_field) >>> my_threshold = float() >>> op.inputs.threshold.connect(my_threshold) + >>> my_both = bool() + >>> op.inputs.both.connect(my_both) >>> # Instantiate operator and connect inputs in one line >>> op = dpf.operators.filter.field_low_pass( ... field=my_field, ... threshold=my_threshold, + ... both=my_both, ... 
) >>> # Get output data >>> result_field = op.outputs.field() """ - def __init__(self, field=None, threshold=None, config=None, server=None): + def __init__(self, field=None, threshold=None, both=None, config=None, server=None): super().__init__(name="core::field::low_pass", config=config, server=server) self._inputs = InputsFieldLowPass(self) self._outputs = OutputsFieldLowPass(self) @@ -55,6 +63,8 @@ def __init__(self, field=None, threshold=None, config=None, server=None): self.inputs.field.connect(field) if threshold is not None: self.inputs.threshold.connect(threshold) + if both is not None: + self.inputs.both.connect(both) @staticmethod def _spec(): @@ -77,6 +87,15 @@ def _spec(): document="""A threshold scalar or a field containing one value is expected""", ), + 2: PinSpecification( + name="both", + type_names=["bool"], + optional=True, + document="""Bool(optional, default false) if set to true, + the complement of the filtered fields + container is returned on output pin + #1""", + ), }, map_output_pin_spec={ 0: PinSpecification( @@ -138,6 +157,8 @@ class InputsFieldLowPass(_Inputs): >>> op.inputs.field.connect(my_field) >>> my_threshold = float() >>> op.inputs.threshold.connect(my_threshold) + >>> my_both = bool() + >>> op.inputs.both.connect(my_both) """ def __init__(self, op: Operator): @@ -146,6 +167,8 @@ def __init__(self, op: Operator): self._inputs.append(self._field) self._threshold = Input(field_low_pass._spec().input_pin(1), 1, op, -1) self._inputs.append(self._threshold) + self._both = Input(field_low_pass._spec().input_pin(2), 2, op, -1) + self._inputs.append(self._both) @property def field(self): @@ -189,6 +212,29 @@ def threshold(self): """ return self._threshold + @property + def both(self): + """Allows to connect both input to the operator. + + Bool(optional, default false) if set to true, + the complement of the filtered fields + container is returned on output pin + #1 + + Parameters + ---------- + my_both : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.field_low_pass() + >>> op.inputs.both.connect(my_both) + >>> # or + >>> op.inputs.both(my_both) + """ + return self._both + class OutputsFieldLowPass(_Outputs): """Intermediate class used to get outputs from diff --git a/ansys/dpf/core/operators/filter/field_low_pass_fc.py b/ansys/dpf/core/operators/filter/field_low_pass_fc.py index 64d6bba5aec..254d9dd90e8 100644 --- a/ansys/dpf/core/operators/filter/field_low_pass_fc.py +++ b/ansys/dpf/core/operators/filter/field_low_pass_fc.py @@ -22,6 +22,11 @@ class field_low_pass_fc(Operator): threshold : float or Field A threshold scalar or a field containing one value is expected + both : bool, optional + Bool(optional, default false) if set to true, + the complement of the filtered fields + container is returned on output pin + #1 Examples @@ -36,18 +41,23 @@ class field_low_pass_fc(Operator): >>> op.inputs.fields_container.connect(my_fields_container) >>> my_threshold = float() >>> op.inputs.threshold.connect(my_threshold) + >>> my_both = bool() + >>> op.inputs.both.connect(my_both) >>> # Instantiate operator and connect inputs in one line >>> op = dpf.operators.filter.field_low_pass_fc( ... fields_container=my_fields_container, ... threshold=my_threshold, + ... both=my_both, ... 
) >>> # Get output data >>> result_fields_container = op.outputs.fields_container() """ - def __init__(self, fields_container=None, threshold=None, config=None, server=None): + def __init__( + self, fields_container=None, threshold=None, both=None, config=None, server=None + ): super().__init__(name="core::field::low_pass_fc", config=config, server=server) self._inputs = InputsFieldLowPassFc(self) self._outputs = OutputsFieldLowPassFc(self) @@ -55,6 +65,8 @@ def __init__(self, fields_container=None, threshold=None, config=None, server=No self.inputs.fields_container.connect(fields_container) if threshold is not None: self.inputs.threshold.connect(threshold) + if both is not None: + self.inputs.both.connect(both) @staticmethod def _spec(): @@ -77,6 +89,15 @@ def _spec(): document="""A threshold scalar or a field containing one value is expected""", ), + 2: PinSpecification( + name="both", + type_names=["bool"], + optional=True, + document="""Bool(optional, default false) if set to true, + the complement of the filtered fields + container is returned on output pin + #1""", + ), }, map_output_pin_spec={ 0: PinSpecification( @@ -138,6 +159,8 @@ class InputsFieldLowPassFc(_Inputs): >>> op.inputs.fields_container.connect(my_fields_container) >>> my_threshold = float() >>> op.inputs.threshold.connect(my_threshold) + >>> my_both = bool() + >>> op.inputs.both.connect(my_both) """ def __init__(self, op: Operator): @@ -148,6 +171,8 @@ def __init__(self, op: Operator): self._inputs.append(self._fields_container) self._threshold = Input(field_low_pass_fc._spec().input_pin(1), 1, op, -1) self._inputs.append(self._threshold) + self._both = Input(field_low_pass_fc._spec().input_pin(2), 2, op, -1) + self._inputs.append(self._both) @property def fields_container(self): @@ -191,6 +216,29 @@ def threshold(self): """ return self._threshold + @property + def both(self): + """Allows to connect both input to the operator. + + Bool(optional, default false) if set to true, + the complement of the filtered fields + container is returned on output pin + #1 + + Parameters + ---------- + my_both : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.field_low_pass_fc() + >>> op.inputs.both.connect(my_both) + >>> # or + >>> op.inputs.both(my_both) + """ + return self._both + class OutputsFieldLowPassFc(_Outputs): """Intermediate class used to get outputs from diff --git a/ansys/dpf/core/operators/filter/field_signed_high_pass.py b/ansys/dpf/core/operators/filter/field_signed_high_pass.py new file mode 100644 index 00000000000..3fbf80b4341 --- /dev/null +++ b/ansys/dpf/core/operators/filter/field_signed_high_pass.py @@ -0,0 +1,275 @@ +""" +field_signed_high_pass +====================== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class field_signed_high_pass(Operator): + """The high pass filter returns all the values superior or equal in + absolute value to the threshold value in input. 
+ + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + threshold : float or Field + A threshold scalar or a field containing one + value is expected + both : bool, optional + Bool(optional, default false) if set to true, + the complement of the filtered fields + container is returned on output pin + #1 + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.filter.field_signed_high_pass() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_threshold = float() + >>> op.inputs.threshold.connect(my_threshold) + >>> my_both = bool() + >>> op.inputs.both.connect(my_both) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.filter.field_signed_high_pass( + ... field=my_field, + ... threshold=my_threshold, + ... both=my_both, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__(self, field=None, threshold=None, both=None, config=None, server=None): + super().__init__( + name="core::field::signed_high_pass", config=config, server=server + ) + self._inputs = InputsFieldSignedHighPass(self) + self._outputs = OutputsFieldSignedHighPass(self) + if field is not None: + self.inputs.field.connect(field) + if threshold is not None: + self.inputs.threshold.connect(threshold) + if both is not None: + self.inputs.both.connect(both) + + @staticmethod + def _spec(): + description = """The high pass filter returns all the values superior or equal in + absolute value to the threshold value in input.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="threshold", + type_names=["double", "field"], + optional=False, + document="""A threshold scalar or a field containing one + value is expected""", + ), + 2: PinSpecification( + name="both", + type_names=["bool"], + optional=True, + document="""Bool(optional, default false) if set to true, + the complement of the filtered fields + container is returned on output pin + #1""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the global server. 
+        """
+        return Operator.default_config(
+            name="core::field::signed_high_pass", server=server
+        )
+
+    @property
+    def inputs(self):
+        """Enables to connect inputs to the operator
+
+        Returns
+        --------
+        inputs : InputsFieldSignedHighPass
+        """
+        return super().inputs
+
+    @property
+    def outputs(self):
+        """Enables to get outputs of the operator by evaluating it
+
+        Returns
+        --------
+        outputs : OutputsFieldSignedHighPass
+        """
+        return super().outputs
+
+
+class InputsFieldSignedHighPass(_Inputs):
+    """Intermediate class used to connect user inputs to
+    field_signed_high_pass operator.
+
+    Examples
+    --------
+    >>> from ansys.dpf import core as dpf
+    >>> op = dpf.operators.filter.field_signed_high_pass()
+    >>> my_field = dpf.Field()
+    >>> op.inputs.field.connect(my_field)
+    >>> my_threshold = float()
+    >>> op.inputs.threshold.connect(my_threshold)
+    >>> my_both = bool()
+    >>> op.inputs.both.connect(my_both)
+    """
+
+    def __init__(self, op: Operator):
+        super().__init__(field_signed_high_pass._spec().inputs, op)
+        self._field = Input(field_signed_high_pass._spec().input_pin(0), 0, op, -1)
+        self._inputs.append(self._field)
+        self._threshold = Input(field_signed_high_pass._spec().input_pin(1), 1, op, -1)
+        self._inputs.append(self._threshold)
+        self._both = Input(field_signed_high_pass._spec().input_pin(2), 2, op, -1)
+        self._inputs.append(self._both)
+
+    @property
+    def field(self):
+        """Allows to connect field input to the operator.
+
+        Field or fields container with only one field
+        is expected
+
+        Parameters
+        ----------
+        my_field : Field or FieldsContainer
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.filter.field_signed_high_pass()
+        >>> op.inputs.field.connect(my_field)
+        >>> # or
+        >>> op.inputs.field(my_field)
+        """
+        return self._field
+
+    @property
+    def threshold(self):
+        """Allows to connect threshold input to the operator.
+
+        A threshold scalar or a field containing one
+        value is expected
+
+        Parameters
+        ----------
+        my_threshold : float or Field
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.filter.field_signed_high_pass()
+        >>> op.inputs.threshold.connect(my_threshold)
+        >>> # or
+        >>> op.inputs.threshold(my_threshold)
+        """
+        return self._threshold
+
+    @property
+    def both(self):
+        """Allows to connect both input to the operator.
+
+        Bool(optional, default false) if set to true,
+        the complement of the filtered fields
+        container is returned on output pin
+        #1
+
+        Parameters
+        ----------
+        my_both : bool
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.filter.field_signed_high_pass()
+        >>> op.inputs.both.connect(my_both)
+        >>> # or
+        >>> op.inputs.both(my_both)
+        """
+        return self._both
+
+
+class OutputsFieldSignedHighPass(_Outputs):
+    """Intermediate class used to get outputs from
+    field_signed_high_pass operator.
+
+    Examples
+    --------
+    >>> from ansys.dpf import core as dpf
+    >>> op = dpf.operators.filter.field_signed_high_pass()
+    >>> # Connect inputs : op.inputs. ...
+ >>> result_field = op.outputs.field() + """ + + def __init__(self, op: Operator): + super().__init__(field_signed_high_pass._spec().outputs, op) + self._field = Output(field_signed_high_pass._spec().output_pin(0), 0, op) + self._outputs.append(self._field) + + @property + def field(self): + """Allows to get field output of the operator + + Returns + ---------- + my_field : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.field_signed_high_pass() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() + """ # noqa: E501 + return self._field diff --git a/ansys/dpf/core/operators/filter/scoping_band_pass.py b/ansys/dpf/core/operators/filter/scoping_band_pass.py index 4922a61a25f..65501765423 100644 --- a/ansys/dpf/core/operators/filter/scoping_band_pass.py +++ b/ansys/dpf/core/operators/filter/scoping_band_pass.py @@ -23,7 +23,7 @@ class scoping_band_pass(Operator): min_threshold : float or Field A min threshold scalar or a field containing one value is expected - max_threshold : float or Field + max_threshold : float or Field, optional A max threshold scalar or a field containing one value is expected @@ -97,7 +97,7 @@ def _spec(): 2: PinSpecification( name="max_threshold", type_names=["double", "field"], - optional=False, + optional=True, document="""A max threshold scalar or a field containing one value is expected""", ), diff --git a/ansys/dpf/core/operators/filter/scoping_high_pass.py b/ansys/dpf/core/operators/filter/scoping_high_pass.py index 3c0a37aafa9..56555c6f18c 100644 --- a/ansys/dpf/core/operators/filter/scoping_high_pass.py +++ b/ansys/dpf/core/operators/filter/scoping_high_pass.py @@ -22,6 +22,11 @@ class scoping_high_pass(Operator): threshold : float or Field A threshold scalar or a field containing one value is expected + both : bool, optional + Bool(optional, default false) if set to true, + the complement of the filtered fields + container is returned on output pin + #1 Examples @@ -36,18 +41,21 @@ class scoping_high_pass(Operator): >>> op.inputs.field.connect(my_field) >>> my_threshold = float() >>> op.inputs.threshold.connect(my_threshold) + >>> my_both = bool() + >>> op.inputs.both.connect(my_both) >>> # Instantiate operator and connect inputs in one line >>> op = dpf.operators.filter.scoping_high_pass( ... field=my_field, ... threshold=my_threshold, + ... both=my_both, ... 
) >>> # Get output data >>> result_scoping = op.outputs.scoping() """ - def __init__(self, field=None, threshold=None, config=None, server=None): + def __init__(self, field=None, threshold=None, both=None, config=None, server=None): super().__init__(name="core::scoping::high_pass", config=config, server=server) self._inputs = InputsScopingHighPass(self) self._outputs = OutputsScopingHighPass(self) @@ -55,6 +63,8 @@ def __init__(self, field=None, threshold=None, config=None, server=None): self.inputs.field.connect(field) if threshold is not None: self.inputs.threshold.connect(threshold) + if both is not None: + self.inputs.both.connect(both) @staticmethod def _spec(): @@ -77,6 +87,15 @@ def _spec(): document="""A threshold scalar or a field containing one value is expected""", ), + 2: PinSpecification( + name="both", + type_names=["bool"], + optional=True, + document="""Bool(optional, default false) if set to true, + the complement of the filtered fields + container is returned on output pin + #1""", + ), }, map_output_pin_spec={ 0: PinSpecification( @@ -138,6 +157,8 @@ class InputsScopingHighPass(_Inputs): >>> op.inputs.field.connect(my_field) >>> my_threshold = float() >>> op.inputs.threshold.connect(my_threshold) + >>> my_both = bool() + >>> op.inputs.both.connect(my_both) """ def __init__(self, op: Operator): @@ -146,6 +167,8 @@ def __init__(self, op: Operator): self._inputs.append(self._field) self._threshold = Input(scoping_high_pass._spec().input_pin(1), 1, op, -1) self._inputs.append(self._threshold) + self._both = Input(scoping_high_pass._spec().input_pin(2), 2, op, -1) + self._inputs.append(self._both) @property def field(self): @@ -189,6 +212,29 @@ def threshold(self): """ return self._threshold + @property + def both(self): + """Allows to connect both input to the operator. + + Bool(optional, default false) if set to true, + the complement of the filtered fields + container is returned on output pin + #1 + + Parameters + ---------- + my_both : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.scoping_high_pass() + >>> op.inputs.both.connect(my_both) + >>> # or + >>> op.inputs.both(my_both) + """ + return self._both + class OutputsScopingHighPass(_Outputs): """Intermediate class used to get outputs from diff --git a/ansys/dpf/core/operators/filter/scoping_low_pass.py b/ansys/dpf/core/operators/filter/scoping_low_pass.py index e4981111d67..2dadec3ecb2 100644 --- a/ansys/dpf/core/operators/filter/scoping_low_pass.py +++ b/ansys/dpf/core/operators/filter/scoping_low_pass.py @@ -22,6 +22,11 @@ class scoping_low_pass(Operator): threshold : float or Field A threshold scalar or a field containing one value is expected + both : bool, optional + Bool(optional, default false) if set to true, + the complement of the filtered fields + container is returned on output pin + #1 Examples @@ -36,18 +41,21 @@ class scoping_low_pass(Operator): >>> op.inputs.field.connect(my_field) >>> my_threshold = float() >>> op.inputs.threshold.connect(my_threshold) + >>> my_both = bool() + >>> op.inputs.both.connect(my_both) >>> # Instantiate operator and connect inputs in one line >>> op = dpf.operators.filter.scoping_low_pass( ... field=my_field, ... threshold=my_threshold, + ... both=my_both, ... 
) >>> # Get output data >>> result_scoping = op.outputs.scoping() """ - def __init__(self, field=None, threshold=None, config=None, server=None): + def __init__(self, field=None, threshold=None, both=None, config=None, server=None): super().__init__(name="core::scoping::low_pass", config=config, server=server) self._inputs = InputsScopingLowPass(self) self._outputs = OutputsScopingLowPass(self) @@ -55,6 +63,8 @@ def __init__(self, field=None, threshold=None, config=None, server=None): self.inputs.field.connect(field) if threshold is not None: self.inputs.threshold.connect(threshold) + if both is not None: + self.inputs.both.connect(both) @staticmethod def _spec(): @@ -77,6 +87,15 @@ def _spec(): document="""A threshold scalar or a field containing one value is expected""", ), + 2: PinSpecification( + name="both", + type_names=["bool"], + optional=True, + document="""Bool(optional, default false) if set to true, + the complement of the filtered fields + container is returned on output pin + #1""", + ), }, map_output_pin_spec={ 0: PinSpecification( @@ -138,6 +157,8 @@ class InputsScopingLowPass(_Inputs): >>> op.inputs.field.connect(my_field) >>> my_threshold = float() >>> op.inputs.threshold.connect(my_threshold) + >>> my_both = bool() + >>> op.inputs.both.connect(my_both) """ def __init__(self, op: Operator): @@ -146,6 +167,8 @@ def __init__(self, op: Operator): self._inputs.append(self._field) self._threshold = Input(scoping_low_pass._spec().input_pin(1), 1, op, -1) self._inputs.append(self._threshold) + self._both = Input(scoping_low_pass._spec().input_pin(2), 2, op, -1) + self._inputs.append(self._both) @property def field(self): @@ -189,6 +212,29 @@ def threshold(self): """ return self._threshold + @property + def both(self): + """Allows to connect both input to the operator. + + Bool(optional, default false) if set to true, + the complement of the filtered fields + container is returned on output pin + #1 + + Parameters + ---------- + my_both : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.scoping_low_pass() + >>> op.inputs.both.connect(my_both) + >>> # or + >>> op.inputs.both(my_both) + """ + return self._both + class OutputsScopingLowPass(_Outputs): """Intermediate class used to get outputs from diff --git a/ansys/dpf/core/operators/filter/signed_scoping_high_pass.py b/ansys/dpf/core/operators/filter/signed_scoping_high_pass.py new file mode 100644 index 00000000000..c5a3f733adc --- /dev/null +++ b/ansys/dpf/core/operators/filter/signed_scoping_high_pass.py @@ -0,0 +1,277 @@ +""" +signed_scoping_high_pass +======================== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class signed_scoping_high_pass(Operator): + """The high pass filter returns all the values superior or equal in + absolute value to the threshold value in input. 
+ + Parameters + ---------- + field : Field or FieldsContainer + Field or fields container with only one field + is expected + threshold : float or Field + A threshold scalar or a field containing one + value is expected + both : bool, optional + Bool(optional, default false) if set to true, + the complement of the filtered fields + container is returned on output pin + #1 + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.filter.signed_scoping_high_pass() + + >>> # Make input connections + >>> my_field = dpf.Field() + >>> op.inputs.field.connect(my_field) + >>> my_threshold = float() + >>> op.inputs.threshold.connect(my_threshold) + >>> my_both = bool() + >>> op.inputs.both.connect(my_both) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.filter.signed_scoping_high_pass( + ... field=my_field, + ... threshold=my_threshold, + ... both=my_both, + ... ) + + >>> # Get output data + >>> result_scoping = op.outputs.scoping() + """ + + def __init__(self, field=None, threshold=None, both=None, config=None, server=None): + super().__init__( + name="core::scoping::signed_high_pass", config=config, server=server + ) + self._inputs = InputsSignedScopingHighPass(self) + self._outputs = OutputsSignedScopingHighPass(self) + if field is not None: + self.inputs.field.connect(field) + if threshold is not None: + self.inputs.threshold.connect(threshold) + if both is not None: + self.inputs.both.connect(both) + + @staticmethod + def _spec(): + description = """The high pass filter returns all the values superior or equal in + absolute value to the threshold value in input.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field", "fields_container"], + optional=False, + document="""Field or fields container with only one field + is expected""", + ), + 1: PinSpecification( + name="threshold", + type_names=["double", "field"], + optional=False, + document="""A threshold scalar or a field containing one + value is expected""", + ), + 2: PinSpecification( + name="both", + type_names=["bool"], + optional=True, + document="""Bool(optional, default false) if set to true, + the complement of the filtered fields + container is returned on output pin + #1""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="scoping", + type_names=["scoping"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the global server. 
+        """
+        return Operator.default_config(
+            name="core::scoping::signed_high_pass", server=server
+        )
+
+    @property
+    def inputs(self):
+        """Enables to connect inputs to the operator
+
+        Returns
+        --------
+        inputs : InputsSignedScopingHighPass
+        """
+        return super().inputs
+
+    @property
+    def outputs(self):
+        """Enables to get outputs of the operator by evaluating it
+
+        Returns
+        --------
+        outputs : OutputsSignedScopingHighPass
+        """
+        return super().outputs
+
+
+class InputsSignedScopingHighPass(_Inputs):
+    """Intermediate class used to connect user inputs to
+    signed_scoping_high_pass operator.
+
+    Examples
+    --------
+    >>> from ansys.dpf import core as dpf
+    >>> op = dpf.operators.filter.signed_scoping_high_pass()
+    >>> my_field = dpf.Field()
+    >>> op.inputs.field.connect(my_field)
+    >>> my_threshold = float()
+    >>> op.inputs.threshold.connect(my_threshold)
+    >>> my_both = bool()
+    >>> op.inputs.both.connect(my_both)
+    """
+
+    def __init__(self, op: Operator):
+        super().__init__(signed_scoping_high_pass._spec().inputs, op)
+        self._field = Input(signed_scoping_high_pass._spec().input_pin(0), 0, op, -1)
+        self._inputs.append(self._field)
+        self._threshold = Input(
+            signed_scoping_high_pass._spec().input_pin(1), 1, op, -1
+        )
+        self._inputs.append(self._threshold)
+        self._both = Input(signed_scoping_high_pass._spec().input_pin(2), 2, op, -1)
+        self._inputs.append(self._both)
+
+    @property
+    def field(self):
+        """Allows to connect field input to the operator.
+
+        Field or fields container with only one field
+        is expected
+
+        Parameters
+        ----------
+        my_field : Field or FieldsContainer
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.filter.signed_scoping_high_pass()
+        >>> op.inputs.field.connect(my_field)
+        >>> # or
+        >>> op.inputs.field(my_field)
+        """
+        return self._field
+
+    @property
+    def threshold(self):
+        """Allows to connect threshold input to the operator.
+
+        A threshold scalar or a field containing one
+        value is expected
+
+        Parameters
+        ----------
+        my_threshold : float or Field
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.filter.signed_scoping_high_pass()
+        >>> op.inputs.threshold.connect(my_threshold)
+        >>> # or
+        >>> op.inputs.threshold(my_threshold)
+        """
+        return self._threshold
+
+    @property
+    def both(self):
+        """Allows to connect both input to the operator.
+
+        Bool(optional, default false) if set to true,
+        the complement of the filtered fields
+        container is returned on output pin
+        #1
+
+        Parameters
+        ----------
+        my_both : bool
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.filter.signed_scoping_high_pass()
+        >>> op.inputs.both.connect(my_both)
+        >>> # or
+        >>> op.inputs.both(my_both)
+        """
+        return self._both
+
+
+class OutputsSignedScopingHighPass(_Outputs):
+    """Intermediate class used to get outputs from
+    signed_scoping_high_pass operator.
+
+    Examples
+    --------
+    >>> from ansys.dpf import core as dpf
+    >>> op = dpf.operators.filter.signed_scoping_high_pass()
+    >>> # Connect inputs : op.inputs. ...
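+    >>> # Illustrative sketch (behavior inferred from the pin documentation
+    >>> # above, hypothetical data): the scoping on pin 0 lists the entities
+    >>> # that pass the threshold; with both=True the complement is described
+    >>> # as being returned on output pin #1.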
+ >>> result_scoping = op.outputs.scoping() + """ + + def __init__(self, op: Operator): + super().__init__(signed_scoping_high_pass._spec().outputs, op) + self._scoping = Output(signed_scoping_high_pass._spec().output_pin(0), 0, op) + self._outputs.append(self._scoping) + + @property + def scoping(self): + """Allows to get scoping output of the operator + + Returns + ---------- + my_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.filter.signed_scoping_high_pass() + >>> # Connect inputs : op.inputs. ... + >>> result_scoping = op.outputs.scoping() + """ # noqa: E501 + return self._scoping diff --git a/ansys/dpf/core/operators/geo/__init__.py b/ansys/dpf/core/operators/geo/__init__.py index d35f97faac4..27f602e87b1 100644 --- a/ansys/dpf/core/operators/geo/__init__.py +++ b/ansys/dpf/core/operators/geo/__init__.py @@ -9,9 +9,5 @@ from .elements_facets_surfaces_over_time import elements_facets_surfaces_over_time from .elements_volume import elements_volume from .element_nodal_contribution import element_nodal_contribution -from .center_of_gravity import center_of_gravity from .integrate_over_elements import integrate_over_elements -from .mass import mass -from .moment_of_inertia import moment_of_inertia from .normals import normals -from .prepare_mapping_workflow import prepare_mapping_workflow diff --git a/ansys/dpf/core/operators/geo/center_of_gravity.py b/ansys/dpf/core/operators/geo/center_of_gravity.py deleted file mode 100644 index 0a4c13ff091..00000000000 --- a/ansys/dpf/core/operators/geo/center_of_gravity.py +++ /dev/null @@ -1,290 +0,0 @@ -""" -center_of_gravity -================= -Autogenerated DPF operator classes. -""" -from warnings import warn -from ansys.dpf.core.dpf_operator import Operator -from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs -from ansys.dpf.core.operators.specification import PinSpecification, Specification - - -class center_of_gravity(Operator): - """Compute the center of gravity of a set of elements - - Parameters - ---------- - mesh : MeshedRegion, optional - mesh_scoping : Scoping, optional - Mesh scoping, if not set, all the elements of - the mesh are considered. - field : Field, optional - Elemental or nodal ponderation used in - computation. - - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.geo.center_of_gravity() - - >>> # Make input connections - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.geo.center_of_gravity( - ... mesh=my_mesh, - ... mesh_scoping=my_mesh_scoping, - ... field=my_field, - ... 
) - - >>> # Get output data - >>> result_field = op.outputs.field() - >>> result_mesh = op.outputs.mesh() - """ - - def __init__( - self, mesh=None, mesh_scoping=None, field=None, config=None, server=None - ): - super().__init__( - name="topology::center_of_gravity", config=config, server=server - ) - self._inputs = InputsCenterOfGravity(self) - self._outputs = OutputsCenterOfGravity(self) - if mesh is not None: - self.inputs.mesh.connect(mesh) - if mesh_scoping is not None: - self.inputs.mesh_scoping.connect(mesh_scoping) - if field is not None: - self.inputs.field.connect(field) - - @staticmethod - def _spec(): - description = """Compute the center of gravity of a set of elements""" - spec = Specification( - description=description, - map_input_pin_spec={ - 0: PinSpecification( - name="mesh", - type_names=["abstract_meshed_region"], - optional=True, - document="""""", - ), - 1: PinSpecification( - name="mesh_scoping", - type_names=["scoping"], - optional=True, - document="""Mesh scoping, if not set, all the elements of - the mesh are considered.""", - ), - 2: PinSpecification( - name="field", - type_names=["field"], - optional=True, - document="""Elemental or nodal ponderation used in - computation.""", - ), - }, - map_output_pin_spec={ - 0: PinSpecification( - name="field", - type_names=["field"], - optional=False, - document="""""", - ), - 1: PinSpecification( - name="mesh", - type_names=["abstract_meshed_region"], - optional=False, - document="""Center of gravity as a mesh""", - ), - }, - ) - return spec - - @staticmethod - def default_config(server=None): - """Returns the default config of the operator. - - This config can then be changed to the user needs and be used to - instantiate the operator. The Configuration allows to customize - how the operation will be processed by the operator. - - Parameters - ---------- - server : server.DPFServer, optional - Server with channel connected to the remote or local instance. When - ``None``, attempts to use the global server. - """ - return Operator.default_config( - name="topology::center_of_gravity", server=server - ) - - @property - def inputs(self): - """Enables to connect inputs to the operator - - Returns - -------- - inputs : InputsCenterOfGravity - """ - return super().inputs - - @property - def outputs(self): - """Enables to get outputs of the operator by evaluationg it - - Returns - -------- - outputs : OutputsCenterOfGravity - """ - return super().outputs - - -class InputsCenterOfGravity(_Inputs): - """Intermediate class used to connect user inputs to - center_of_gravity operator. - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.center_of_gravity() - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - """ - - def __init__(self, op: Operator): - super().__init__(center_of_gravity._spec().inputs, op) - self._mesh = Input(center_of_gravity._spec().input_pin(0), 0, op, -1) - self._inputs.append(self._mesh) - self._mesh_scoping = Input(center_of_gravity._spec().input_pin(1), 1, op, -1) - self._inputs.append(self._mesh_scoping) - self._field = Input(center_of_gravity._spec().input_pin(2), 2, op, -1) - self._inputs.append(self._field) - - @property - def mesh(self): - """Allows to connect mesh input to the operator. 
- - Parameters - ---------- - my_mesh : MeshedRegion - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.center_of_gravity() - >>> op.inputs.mesh.connect(my_mesh) - >>> # or - >>> op.inputs.mesh(my_mesh) - """ - return self._mesh - - @property - def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator. - - Mesh scoping, if not set, all the elements of - the mesh are considered. - - Parameters - ---------- - my_mesh_scoping : Scoping - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.center_of_gravity() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> # or - >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ - return self._mesh_scoping - - @property - def field(self): - """Allows to connect field input to the operator. - - Elemental or nodal ponderation used in - computation. - - Parameters - ---------- - my_field : Field - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.center_of_gravity() - >>> op.inputs.field.connect(my_field) - >>> # or - >>> op.inputs.field(my_field) - """ - return self._field - - -class OutputsCenterOfGravity(_Outputs): - """Intermediate class used to get outputs from - center_of_gravity operator. - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.center_of_gravity() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - >>> result_mesh = op.outputs.mesh() - """ - - def __init__(self, op: Operator): - super().__init__(center_of_gravity._spec().outputs, op) - self._field = Output(center_of_gravity._spec().output_pin(0), 0, op) - self._outputs.append(self._field) - self._mesh = Output(center_of_gravity._spec().output_pin(1), 1, op) - self._outputs.append(self._mesh) - - @property - def field(self): - """Allows to get field output of the operator - - Returns - ---------- - my_field : Field - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.center_of_gravity() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ # noqa: E501 - return self._field - - @property - def mesh(self): - """Allows to get mesh output of the operator - - Returns - ---------- - my_mesh : MeshedRegion - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.center_of_gravity() - >>> # Connect inputs : op.inputs. ... - >>> result_mesh = op.outputs.mesh() - """ # noqa: E501 - return self._mesh diff --git a/ansys/dpf/core/operators/geo/mass.py b/ansys/dpf/core/operators/geo/mass.py deleted file mode 100644 index 9bf463816af..00000000000 --- a/ansys/dpf/core/operators/geo/mass.py +++ /dev/null @@ -1,259 +0,0 @@ -""" -mass -==== -Autogenerated DPF operator classes. -""" -from warnings import warn -from ansys.dpf.core.dpf_operator import Operator -from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs -from ansys.dpf.core.operators.specification import PinSpecification, Specification - - -class mass(Operator): - """Compute the mass of a set of elements. - - Parameters - ---------- - mesh : MeshedRegion, optional - mesh_scoping : Scoping, optional - Mesh scoping, if not set, all the elements of - the mesh are considered. - field : Field, optional - Elemental or nodal ponderation used in - computation. 
- - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.geo.mass() - - >>> # Make input connections - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.geo.mass( - ... mesh=my_mesh, - ... mesh_scoping=my_mesh_scoping, - ... field=my_field, - ... ) - - >>> # Get output data - >>> result_field = op.outputs.field() - """ - - def __init__( - self, mesh=None, mesh_scoping=None, field=None, config=None, server=None - ): - super().__init__(name="topology::mass", config=config, server=server) - self._inputs = InputsMass(self) - self._outputs = OutputsMass(self) - if mesh is not None: - self.inputs.mesh.connect(mesh) - if mesh_scoping is not None: - self.inputs.mesh_scoping.connect(mesh_scoping) - if field is not None: - self.inputs.field.connect(field) - - @staticmethod - def _spec(): - description = """Compute the mass of a set of elements.""" - spec = Specification( - description=description, - map_input_pin_spec={ - 0: PinSpecification( - name="mesh", - type_names=["abstract_meshed_region"], - optional=True, - document="""""", - ), - 1: PinSpecification( - name="mesh_scoping", - type_names=["scoping"], - optional=True, - document="""Mesh scoping, if not set, all the elements of - the mesh are considered.""", - ), - 2: PinSpecification( - name="field", - type_names=["field"], - optional=True, - document="""Elemental or nodal ponderation used in - computation.""", - ), - }, - map_output_pin_spec={ - 0: PinSpecification( - name="field", - type_names=["field"], - optional=False, - document="""""", - ), - }, - ) - return spec - - @staticmethod - def default_config(server=None): - """Returns the default config of the operator. - - This config can then be changed to the user needs and be used to - instantiate the operator. The Configuration allows to customize - how the operation will be processed by the operator. - - Parameters - ---------- - server : server.DPFServer, optional - Server with channel connected to the remote or local instance. When - ``None``, attempts to use the global server. - """ - return Operator.default_config(name="topology::mass", server=server) - - @property - def inputs(self): - """Enables to connect inputs to the operator - - Returns - -------- - inputs : InputsMass - """ - return super().inputs - - @property - def outputs(self): - """Enables to get outputs of the operator by evaluationg it - - Returns - -------- - outputs : OutputsMass - """ - return super().outputs - - -class InputsMass(_Inputs): - """Intermediate class used to connect user inputs to - mass operator. 
- - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.mass() - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - >>> my_mesh_scoping = dpf.Scoping() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_field = dpf.Field() - >>> op.inputs.field.connect(my_field) - """ - - def __init__(self, op: Operator): - super().__init__(mass._spec().inputs, op) - self._mesh = Input(mass._spec().input_pin(0), 0, op, -1) - self._inputs.append(self._mesh) - self._mesh_scoping = Input(mass._spec().input_pin(1), 1, op, -1) - self._inputs.append(self._mesh_scoping) - self._field = Input(mass._spec().input_pin(2), 2, op, -1) - self._inputs.append(self._field) - - @property - def mesh(self): - """Allows to connect mesh input to the operator. - - Parameters - ---------- - my_mesh : MeshedRegion - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.mass() - >>> op.inputs.mesh.connect(my_mesh) - >>> # or - >>> op.inputs.mesh(my_mesh) - """ - return self._mesh - - @property - def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator. - - Mesh scoping, if not set, all the elements of - the mesh are considered. - - Parameters - ---------- - my_mesh_scoping : Scoping - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.mass() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> # or - >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ - return self._mesh_scoping - - @property - def field(self): - """Allows to connect field input to the operator. - - Elemental or nodal ponderation used in - computation. - - Parameters - ---------- - my_field : Field - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.mass() - >>> op.inputs.field.connect(my_field) - >>> # or - >>> op.inputs.field(my_field) - """ - return self._field - - -class OutputsMass(_Outputs): - """Intermediate class used to get outputs from - mass operator. - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.mass() - >>> # Connect inputs : op.inputs. ... - >>> result_field = op.outputs.field() - """ - - def __init__(self, op: Operator): - super().__init__(mass._spec().outputs, op) - self._field = Output(mass._spec().output_pin(0), 0, op) - self._outputs.append(self._field) - - @property - def field(self): - """Allows to get field output of the operator - - Returns - ---------- - my_field : Field - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.mass() - >>> # Connect inputs : op.inputs. ... 
- >>> result_field = op.outputs.field() - """ # noqa: E501 - return self._field diff --git a/ansys/dpf/core/operators/mapping/__init__.py b/ansys/dpf/core/operators/mapping/__init__.py index 54ff34c4251..fc9cfc1b125 100644 --- a/ansys/dpf/core/operators/mapping/__init__.py +++ b/ansys/dpf/core/operators/mapping/__init__.py @@ -3,3 +3,4 @@ from .on_coordinates import on_coordinates from .scoping_on_coordinates import scoping_on_coordinates from .solid_to_skin import solid_to_skin +from .prepare_mapping_workflow import prepare_mapping_workflow diff --git a/ansys/dpf/core/operators/geo/prepare_mapping_workflow.py b/ansys/dpf/core/operators/mapping/prepare_mapping_workflow.py similarity index 94% rename from ansys/dpf/core/operators/geo/prepare_mapping_workflow.py rename to ansys/dpf/core/operators/mapping/prepare_mapping_workflow.py index d793dad39d2..d3a135a6ff1 100644 --- a/ansys/dpf/core/operators/geo/prepare_mapping_workflow.py +++ b/ansys/dpf/core/operators/mapping/prepare_mapping_workflow.py @@ -28,7 +28,7 @@ class prepare_mapping_workflow(Operator): >>> from ansys.dpf import core as dpf >>> # Instantiate operator - >>> op = dpf.operators.geo.prepare_mapping_workflow() + >>> op = dpf.operators.mapping.prepare_mapping_workflow() >>> # Make input connections >>> my_input_support = dpf.Field() @@ -41,7 +41,7 @@ class prepare_mapping_workflow(Operator): >>> op.inputs.influence_box.connect(my_influence_box) >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.geo.prepare_mapping_workflow( + >>> op = dpf.operators.mapping.prepare_mapping_workflow( ... input_support=my_input_support, ... output_support=my_output_support, ... filter_radius=my_filter_radius, @@ -160,7 +160,7 @@ class InputsPrepareMappingWorkflow(_Inputs): Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.prepare_mapping_workflow() + >>> op = dpf.operators.mapping.prepare_mapping_workflow() >>> my_input_support = dpf.Field() >>> op.inputs.input_support.connect(my_input_support) >>> my_output_support = dpf.Field() @@ -201,7 +201,7 @@ def input_support(self): Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.prepare_mapping_workflow() + >>> op = dpf.operators.mapping.prepare_mapping_workflow() >>> op.inputs.input_support.connect(my_input_support) >>> # or >>> op.inputs.input_support(my_input_support) @@ -219,7 +219,7 @@ def output_support(self): Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.prepare_mapping_workflow() + >>> op = dpf.operators.mapping.prepare_mapping_workflow() >>> op.inputs.output_support.connect(my_output_support) >>> # or >>> op.inputs.output_support(my_output_support) @@ -239,7 +239,7 @@ def filter_radius(self): Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.prepare_mapping_workflow() + >>> op = dpf.operators.mapping.prepare_mapping_workflow() >>> op.inputs.filter_radius.connect(my_filter_radius) >>> # or >>> op.inputs.filter_radius(my_filter_radius) @@ -257,7 +257,7 @@ def influence_box(self): Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.prepare_mapping_workflow() + >>> op = dpf.operators.mapping.prepare_mapping_workflow() >>> op.inputs.influence_box.connect(my_influence_box) >>> # or >>> op.inputs.influence_box(my_influence_box) @@ -272,7 +272,7 @@ class OutputsPrepareMappingWorkflow(_Outputs): Examples -------- >>> from ansys.dpf import core as dpf - >>> op = 
dpf.operators.geo.prepare_mapping_workflow() + >>> op = dpf.operators.mapping.prepare_mapping_workflow() >>> # Connect inputs : op.inputs. ... >>> result_mapping_workflow = op.outputs.mapping_workflow() """ @@ -295,7 +295,7 @@ def mapping_workflow(self): Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.geo.prepare_mapping_workflow() + >>> op = dpf.operators.mapping.prepare_mapping_workflow() >>> # Connect inputs : op.inputs. ... >>> result_mapping_workflow = op.outputs.mapping_workflow() """ # noqa: E501 diff --git a/ansys/dpf/core/operators/math/__init__.py b/ansys/dpf/core/operators/math/__init__.py index 71e8f341837..9831f25306a 100644 --- a/ansys/dpf/core/operators/math/__init__.py +++ b/ansys/dpf/core/operators/math/__init__.py @@ -1,3 +1,4 @@ +from .cross_product_fc import cross_product_fc from .minus import minus from .cplx_multiply import cplx_multiply from .unit_convert import unit_convert @@ -33,6 +34,7 @@ from .exponential_fc import exponential_fc from .ln import ln from .ln_fc import ln_fc +from .cross_product import cross_product from .component_wise_divide_fc import component_wise_divide_fc from .kronecker_prod import kronecker_prod from .real_part import real_part @@ -41,7 +43,6 @@ from .amplitude import amplitude from .cplx_dot import cplx_dot from .cplx_divide import cplx_divide -from .dot import dot from .cplx_derive import cplx_derive from .polar_to_cplx import polar_to_cplx from .amplitude_fc import amplitude_fc @@ -51,7 +52,9 @@ from .modulus import modulus from .accumulate_fc import accumulate_fc from .generalized_inner_product import generalized_inner_product +from .outer_product import outer_product from .overall_dot import overall_dot +from .dot import dot from .dot_tensor import dot_tensor from .scale_by_field import scale_by_field from .scale_by_field_fc import scale_by_field_fc @@ -60,73 +63,15 @@ from .accumulate_level_over_label_fc import accumulate_level_over_label_fc from .accumulate_over_label_fc import accumulate_over_label_fc from .average_over_label_fc import average_over_label_fc +from .min_max_over_time import min_max_over_time from .correlation import correlation from .make_one_on_comp import make_one_on_comp from .entity_extractor import entity_extractor from .modal_superposition import modal_superposition from .matrix_inverse import matrix_inverse -from .qr_solve import qr_solve -from .fft_eval import fft_eval -from .fft_gradient_eval import fft_gradient_eval -from .fft_multi_harmonic_minmax import fft_multi_harmonic_minmax -from .svd import svd -from .matrix_inverse import matrix_inverse -from .qr_solve import qr_solve -from .fft_eval import fft_eval -from .fft_gradient_eval import fft_gradient_eval -from .fft_multi_harmonic_minmax import fft_multi_harmonic_minmax -from .svd import svd -from .matrix_inverse import matrix_inverse -from .qr_solve import qr_solve -from .fft_eval import fft_eval -from .fft_gradient_eval import fft_gradient_eval -from .fft_multi_harmonic_minmax import fft_multi_harmonic_minmax -from .svd import svd -from .matrix_inverse import matrix_inverse -from .qr_solve import qr_solve -from .fft_eval import fft_eval -from .fft_gradient_eval import fft_gradient_eval -from .fft_multi_harmonic_minmax import fft_multi_harmonic_minmax -from .svd import svd -from .matrix_inverse import matrix_inverse -from .qr_solve import qr_solve -from .fft_eval import fft_eval -from .fft_gradient_eval import fft_gradient_eval -from .fft_multi_harmonic_minmax import fft_multi_harmonic_minmax -from .svd import svd -from 
.matrix_inverse import matrix_inverse -from .qr_solve import qr_solve -from .fft_eval import fft_eval -from .fft_gradient_eval import fft_gradient_eval -from .fft_multi_harmonic_minmax import fft_multi_harmonic_minmax -from .svd import svd -from .matrix_inverse import matrix_inverse -from .qr_solve import qr_solve from .fft_eval import fft_eval -from .fft_gradient_eval import fft_gradient_eval -from .fft_multi_harmonic_minmax import fft_multi_harmonic_minmax -from .svd import svd -from .matrix_inverse import matrix_inverse from .qr_solve import qr_solve -from .fft_eval import fft_eval -from .fft_gradient_eval import fft_gradient_eval -from .fft_multi_harmonic_minmax import fft_multi_harmonic_minmax -from .svd import svd -from .matrix_inverse import matrix_inverse -from .qr_solve import qr_solve -from .fft_eval import fft_eval -from .fft_gradient_eval import fft_gradient_eval -from .fft_multi_harmonic_minmax import fft_multi_harmonic_minmax -from .svd import svd -from .matrix_inverse import matrix_inverse -from .qr_solve import qr_solve -from .fft_eval import fft_eval -from .fft_gradient_eval import fft_gradient_eval -from .fft_multi_harmonic_minmax import fft_multi_harmonic_minmax -from .svd import svd -from .matrix_inverse import matrix_inverse -from .qr_solve import qr_solve -from .fft_eval import fft_eval from .fft_gradient_eval import fft_gradient_eval +from .fft_approx import fft_approx from .fft_multi_harmonic_minmax import fft_multi_harmonic_minmax from .svd import svd diff --git a/ansys/dpf/core/operators/math/cross_product.py b/ansys/dpf/core/operators/math/cross_product.py new file mode 100644 index 00000000000..0437a59be1c --- /dev/null +++ b/ansys/dpf/core/operators/math/cross_product.py @@ -0,0 +1,235 @@ +""" +cross_product +============= +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class cross_product(Operator): + """Computes the cross product of two vector fields. Fields could have the + same location or Elemental-Nodal and Nodal. + + Parameters + ---------- + fieldA : Field or FieldsContainer or float + Field or fields container with only one field + is expected + fieldB : Field or FieldsContainer or float + Field or fields container with only one field + is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.cross_product() + + >>> # Make input connections + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.cross_product( + ... fieldA=my_fieldA, + ... fieldB=my_fieldB, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__(self, fieldA=None, fieldB=None, config=None, server=None): + super().__init__(name="cross_product", config=config, server=server) + self._inputs = InputsCrossProduct(self) + self._outputs = OutputsCrossProduct(self) + if fieldA is not None: + self.inputs.fieldA.connect(fieldA) + if fieldB is not None: + self.inputs.fieldB.connect(fieldB) + + @staticmethod + def _spec(): + description = """Computes the cross product of two vector fields. 
Fields could have the
+            same location or Elemental-Nodal and Nodal."""
+        spec = Specification(
+            description=description,
+            map_input_pin_spec={
+                0: PinSpecification(
+                    name="fieldA",
+                    type_names=[
+                        "field",
+                        "fields_container",
+                        "double",
+                        "vector",
+                    ],
+                    optional=False,
+                    document="""Field or fields container with only one field
+        is expected""",
+                ),
+                1: PinSpecification(
+                    name="fieldB",
+                    type_names=[
+                        "field",
+                        "fields_container",
+                        "double",
+                        "vector",
+                    ],
+                    optional=False,
+                    document="""Field or fields container with only one field
+        is expected""",
+                ),
+            },
+            map_output_pin_spec={
+                0: PinSpecification(
+                    name="field",
+                    type_names=["field"],
+                    optional=False,
+                    document="""""",
+                ),
+            },
+        )
+        return spec
+
+    @staticmethod
+    def default_config(server=None):
+        """Returns the default config of the operator.
+
+        This config can then be changed to the user needs and be used to
+        instantiate the operator. The Configuration allows to customize
+        how the operation will be processed by the operator.
+
+        Parameters
+        ----------
+        server : server.DPFServer, optional
+            Server with channel connected to the remote or local instance. When
+            ``None``, attempts to use the global server.
+        """
+        return Operator.default_config(name="cross_product", server=server)
+
+    @property
+    def inputs(self):
+        """Enables to connect inputs to the operator
+
+        Returns
+        --------
+        inputs : InputsCrossProduct
+        """
+        return super().inputs
+
+    @property
+    def outputs(self):
+        """Enables to get outputs of the operator by evaluating it
+
+        Returns
+        --------
+        outputs : OutputsCrossProduct
+        """
+        return super().outputs
+
+
+class InputsCrossProduct(_Inputs):
+    """Intermediate class used to connect user inputs to
+    cross_product operator.
+
+    Examples
+    --------
+    >>> from ansys.dpf import core as dpf
+    >>> op = dpf.operators.math.cross_product()
+    >>> my_fieldA = dpf.Field()
+    >>> op.inputs.fieldA.connect(my_fieldA)
+    >>> my_fieldB = dpf.Field()
+    >>> op.inputs.fieldB.connect(my_fieldB)
+    """
+
+    def __init__(self, op: Operator):
+        super().__init__(cross_product._spec().inputs, op)
+        self._fieldA = Input(cross_product._spec().input_pin(0), 0, op, -1)
+        self._inputs.append(self._fieldA)
+        self._fieldB = Input(cross_product._spec().input_pin(1), 1, op, -1)
+        self._inputs.append(self._fieldB)
+
+    @property
+    def fieldA(self):
+        """Allows to connect fieldA input to the operator.
+
+        Field or fields container with only one field
+        is expected
+
+        Parameters
+        ----------
+        my_fieldA : Field or FieldsContainer or float
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.math.cross_product()
+        >>> op.inputs.fieldA.connect(my_fieldA)
+        >>> # or
+        >>> op.inputs.fieldA(my_fieldA)
+        """
+        return self._fieldA
+
+    @property
+    def fieldB(self):
+        """Allows to connect fieldB input to the operator.
+
+        Field or fields container with only one field
+        is expected
+
+        Parameters
+        ----------
+        my_fieldB : Field or FieldsContainer or float
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.math.cross_product()
+        >>> op.inputs.fieldB.connect(my_fieldB)
+        >>> # or
+        >>> op.inputs.fieldB(my_fieldB)
+        """
+        return self._fieldB
+
+
+class OutputsCrossProduct(_Outputs):
+    """Intermediate class used to get outputs from
+    cross_product operator.
+
+    Examples
+    --------
+    >>> from ansys.dpf import core as dpf
+    >>> op = dpf.operators.math.cross_product()
+    >>> # Connect inputs : op.inputs. ...
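+    >>> # Illustrative sketch (assumes two 3-component vector fields were
+    >>> # connected above): the output field holds their entity-wise cross
+    >>> # product, so it is again a 3-component vector field.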
+ >>> result_field = op.outputs.field() + """ + + def __init__(self, op: Operator): + super().__init__(cross_product._spec().outputs, op) + self._field = Output(cross_product._spec().output_pin(0), 0, op) + self._outputs.append(self._field) + + @property + def field(self): + """Allows to get field output of the operator + + Returns + ---------- + my_field : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.cross_product() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() + """ # noqa: E501 + return self._field diff --git a/ansys/dpf/core/operators/math/cross_product_fc.py b/ansys/dpf/core/operators/math/cross_product_fc.py new file mode 100644 index 00000000000..3036aff3ff4 --- /dev/null +++ b/ansys/dpf/core/operators/math/cross_product_fc.py @@ -0,0 +1,245 @@ +""" +cross_product_fc +================ +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class cross_product_fc(Operator): + """Computes the cross product of two vector fields. Fields could have the + same location or Elemental-Nodal and Nodal. + + Parameters + ---------- + field_or_fields_container_A : Field or FieldsContainer or float + Field or fields container with only one field + is expected + field_or_fields_container_B : Field or FieldsContainer or float + Field or fields container with only one field + is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.cross_product_fc() + + >>> # Make input connections + >>> my_field_or_fields_container_A = dpf.Field() + >>> op.inputs.field_or_fields_container_A.connect(my_field_or_fields_container_A) + >>> my_field_or_fields_container_B = dpf.Field() + >>> op.inputs.field_or_fields_container_B.connect(my_field_or_fields_container_B) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.cross_product_fc( + ... field_or_fields_container_A=my_field_or_fields_container_A, + ... field_or_fields_container_B=my_field_or_fields_container_B, + ... ) + + >>> # Get output data + >>> result_fields_container = op.outputs.fields_container() + """ + + def __init__( + self, + field_or_fields_container_A=None, + field_or_fields_container_B=None, + config=None, + server=None, + ): + super().__init__(name="cross_product_fc", config=config, server=server) + self._inputs = InputsCrossProductFc(self) + self._outputs = OutputsCrossProductFc(self) + if field_or_fields_container_A is not None: + self.inputs.field_or_fields_container_A.connect(field_or_fields_container_A) + if field_or_fields_container_B is not None: + self.inputs.field_or_fields_container_B.connect(field_or_fields_container_B) + + @staticmethod + def _spec(): + description = """Computes the cross product of two vector fields. 
Fields could have the
+            same location or Elemental-Nodal and Nodal."""
+        spec = Specification(
+            description=description,
+            map_input_pin_spec={
+                0: PinSpecification(
+                    name="field_or_fields_container_A",
+                    type_names=[
+                        "field",
+                        "fields_container",
+                        "double",
+                        "vector",
+                    ],
+                    optional=False,
+                    document="""Field or fields container with only one field
+        is expected""",
+                ),
+                1: PinSpecification(
+                    name="field_or_fields_container_B",
+                    type_names=[
+                        "field",
+                        "fields_container",
+                        "double",
+                        "vector",
+                    ],
+                    optional=False,
+                    document="""Field or fields container with only one field
+        is expected""",
+                ),
+            },
+            map_output_pin_spec={
+                0: PinSpecification(
+                    name="fields_container",
+                    type_names=["fields_container"],
+                    optional=False,
+                    document="""""",
+                ),
+            },
+        )
+        return spec
+
+    @staticmethod
+    def default_config(server=None):
+        """Returns the default config of the operator.
+
+        This config can then be changed to the user needs and be used to
+        instantiate the operator. The Configuration allows to customize
+        how the operation will be processed by the operator.
+
+        Parameters
+        ----------
+        server : server.DPFServer, optional
+            Server with channel connected to the remote or local instance. When
+            ``None``, attempts to use the global server.
+        """
+        return Operator.default_config(name="cross_product_fc", server=server)
+
+    @property
+    def inputs(self):
+        """Enables to connect inputs to the operator
+
+        Returns
+        --------
+        inputs : InputsCrossProductFc
+        """
+        return super().inputs
+
+    @property
+    def outputs(self):
+        """Enables to get outputs of the operator by evaluating it
+
+        Returns
+        --------
+        outputs : OutputsCrossProductFc
+        """
+        return super().outputs
+
+
+class InputsCrossProductFc(_Inputs):
+    """Intermediate class used to connect user inputs to
+    cross_product_fc operator.
+
+    Examples
+    --------
+    >>> from ansys.dpf import core as dpf
+    >>> op = dpf.operators.math.cross_product_fc()
+    >>> my_field_or_fields_container_A = dpf.Field()
+    >>> op.inputs.field_or_fields_container_A.connect(my_field_or_fields_container_A)
+    >>> my_field_or_fields_container_B = dpf.Field()
+    >>> op.inputs.field_or_fields_container_B.connect(my_field_or_fields_container_B)
+    """
+
+    def __init__(self, op: Operator):
+        super().__init__(cross_product_fc._spec().inputs, op)
+        self._field_or_fields_container_A = Input(
+            cross_product_fc._spec().input_pin(0), 0, op, -1
+        )
+        self._inputs.append(self._field_or_fields_container_A)
+        self._field_or_fields_container_B = Input(
+            cross_product_fc._spec().input_pin(1), 1, op, -1
+        )
+        self._inputs.append(self._field_or_fields_container_B)
+
+    @property
+    def field_or_fields_container_A(self):
+        """Allows to connect field_or_fields_container_A input to the operator.
+
+        Field or fields container with only one field
+        is expected
+
+        Parameters
+        ----------
+        my_field_or_fields_container_A : Field or FieldsContainer or float
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.math.cross_product_fc()
+        >>> op.inputs.field_or_fields_container_A.connect(my_field_or_fields_container_A)
+        >>> # or
+        >>> op.inputs.field_or_fields_container_A(my_field_or_fields_container_A)
+        """
+        return self._field_or_fields_container_A
+
+    @property
+    def field_or_fields_container_B(self):
+        """Allows to connect field_or_fields_container_B input to the operator.
+
+        Field or fields container with only one field
+        is expected
+
+        Parameters
+        ----------
+        my_field_or_fields_container_B : Field or FieldsContainer or float
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.math.cross_product_fc()
+        >>> op.inputs.field_or_fields_container_B.connect(my_field_or_fields_container_B)
+        >>> # or
+        >>> op.inputs.field_or_fields_container_B(my_field_or_fields_container_B)
+        """
+        return self._field_or_fields_container_B
+
+
+class OutputsCrossProductFc(_Outputs):
+    """Intermediate class used to get outputs from
+    cross_product_fc operator.
+
+    Examples
+    --------
+    >>> from ansys.dpf import core as dpf
+    >>> op = dpf.operators.math.cross_product_fc()
+    >>> # Connect inputs : op.inputs. ...
+    >>> result_fields_container = op.outputs.fields_container()
+    """
+
+    def __init__(self, op: Operator):
+        super().__init__(cross_product_fc._spec().outputs, op)
+        self._fields_container = Output(cross_product_fc._spec().output_pin(0), 0, op)
+        self._outputs.append(self._fields_container)
+
+    @property
+    def fields_container(self):
+        """Allows to get fields_container output of the operator
+
+        Returns
+        ----------
+        my_fields_container : FieldsContainer
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.math.cross_product_fc()
+        >>> # Connect inputs : op.inputs. ...
+        >>> result_fields_container = op.outputs.fields_container()
+        """  # noqa: E501
+        return self._fields_container
diff --git a/ansys/dpf/core/operators/math/fft_approx.py b/ansys/dpf/core/operators/math/fft_approx.py
new file mode 100644
index 00000000000..5db77ac939b
--- /dev/null
+++ b/ansys/dpf/core/operators/math/fft_approx.py
@@ -0,0 +1,454 @@
+"""
+fft_approx
+==========
+Autogenerated DPF operator classes.
+"""
+from warnings import warn
+from ansys.dpf.core.dpf_operator import Operator
+from ansys.dpf.core.inputs import Input, _Inputs
+from ansys.dpf.core.outputs import Output, _Outputs
+from ansys.dpf.core.operators.specification import PinSpecification, Specification
+
+
+class fft_approx(Operator):
+    """Computes the fitting curve using FFT filtering and cubic fitting in
+    space (node i: x=time, y=data), with the possibility to compute the
+    first and the second derivatives of the curve.
+
+    Parameters
+    ----------
+    time_scoping : Scoping, optional
+        A time scoping to rescope / split an input
+        fields container
+    mesh_scoping : Scoping or ScopingsContainer, optional
+        A space (mesh entities) scoping (or
+        scopings container) to rescope / split
+        an input fields container
+    entity_to_fit : FieldsContainer
+    component_number : int
+        Component number as an int, ex '0' for
+        x-displacement, '1' for
+        y-displacement,...
+    first_derivative : bool, optional
+        Calculate the first derivative? (bool):
+        default is false
+    second_derivative : bool, optional
+        Calculate the second derivative? (bool):
+        default is false
+
+
+    Examples
+    --------
+    >>> from ansys.dpf import core as dpf
+
+    >>> # Instantiate operator
+    >>> op = dpf.operators.math.fft_approx()
+
+    >>> # Make input connections
+    >>> my_time_scoping = dpf.Scoping()
+    >>> op.inputs.time_scoping.connect(my_time_scoping)
+    >>> my_mesh_scoping = dpf.Scoping()
+    >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+    >>> my_entity_to_fit = dpf.FieldsContainer()
+    >>> op.inputs.entity_to_fit.connect(my_entity_to_fit)
+    >>> my_component_number = int()
+    >>> op.inputs.component_number.connect(my_component_number)
+    >>> my_first_derivative = bool()
+    >>> op.inputs.first_derivative.connect(my_first_derivative)
+    >>> my_second_derivative = bool()
+    >>> op.inputs.second_derivative.connect(my_second_derivative)
+
+    >>> # Instantiate operator and connect inputs in one line
+    >>> op = dpf.operators.math.fft_approx(
+    ...     time_scoping=my_time_scoping,
+    ...     mesh_scoping=my_mesh_scoping,
+    ...     entity_to_fit=my_entity_to_fit,
+    ...     component_number=my_component_number,
+    ...     first_derivative=my_first_derivative,
+    ...     second_derivative=my_second_derivative,
+    ... )
+
+    >>> # Get output data
+    >>> result_fitted_entity_y = op.outputs.fitted_entity_y()
+    >>> result_first_der_dy = op.outputs.first_der_dy()
+    >>> result_second_der_dy = op.outputs.second_der_dy()
+    """
+
+    def __init__(
+        self,
+        time_scoping=None,
+        mesh_scoping=None,
+        entity_to_fit=None,
+        component_number=None,
+        first_derivative=None,
+        second_derivative=None,
+        config=None,
+        server=None,
+    ):
+        super().__init__(name="fft_approx", config=config, server=server)
+        self._inputs = InputsFftApprox(self)
+        self._outputs = OutputsFftApprox(self)
+        if time_scoping is not None:
+            self.inputs.time_scoping.connect(time_scoping)
+        if mesh_scoping is not None:
+            self.inputs.mesh_scoping.connect(mesh_scoping)
+        if entity_to_fit is not None:
+            self.inputs.entity_to_fit.connect(entity_to_fit)
+        if component_number is not None:
+            self.inputs.component_number.connect(component_number)
+        if first_derivative is not None:
+            self.inputs.first_derivative.connect(first_derivative)
+        if second_derivative is not None:
+            self.inputs.second_derivative.connect(second_derivative)
+
+    @staticmethod
+    def _spec():
+        description = """Computes the fitting curve using FFT filtering and cubic fitting in
+            space (node i: x=time, y=data), with the possibility to
+            compute the first and the second derivatives of the curve."""
+        spec = Specification(
+            description=description,
+            map_input_pin_spec={
+                0: PinSpecification(
+                    name="time_scoping",
+                    type_names=["vector", "scoping"],
+                    optional=True,
+                    document="""A time scoping to rescope / split an input
+        fields container""",
+                ),
+                1: PinSpecification(
+                    name="mesh_scoping",
+                    type_names=["umap", "scoping", "scopings_container"],
+                    optional=True,
+                    document="""A space (mesh entities) scoping (or
+        scopings container) to rescope / split
+        an input fields container""",
+                ),
+                2: PinSpecification(
+                    name="entity_to_fit",
+                    type_names=["fields_container"],
+                    optional=False,
+                    document="""""",
+                ),
+                3: PinSpecification(
+                    name="component_number",
+                    type_names=["int32"],
+                    optional=False,
+                    document="""Component number as an int, ex '0' for
+        x-displacement, '1' for
+        y-displacement,...""",
+                ),
+                4: PinSpecification(
+                    name="first_derivative",
+                    type_names=["bool"],
+                    optional=True,
+                    document="""Calculate the first derivative? (bool):
+        default is false""",
+                ),
+                5: PinSpecification(
+                    name="second_derivative",
+                    type_names=["bool"],
+                    optional=True,
+                    document="""Calculate the second derivative? (bool):
+        default is false""",
+                ),
+            },
+            map_output_pin_spec={
+                0: PinSpecification(
+                    name="fitted_entity_y",
+                    type_names=["fields_container"],
+                    optional=False,
+                    document="""The fitted entity is fitted using fft along
+        the space scoping (node i: x=time,
+        y=data): fitted y is expected to be
+        close to the input data""",
+                ),
+                1: PinSpecification(
+                    name="first_der_dy",
+                    type_names=["fields_container"],
+                    optional=False,
+                    document="""The first derivative (dy) from the fitted y""",
+                ),
+                2: PinSpecification(
+                    name="second_der_dy",
+                    type_names=["fields_container"],
+                    optional=False,
+                    document="""The second derivative (d2y) from the fitted y""",
+                ),
+            },
+        )
+        return spec
+
+    @staticmethod
+    def default_config(server=None):
+        """Returns the default config of the operator.
+
+        This config can then be changed to the user needs and be used to
+        instantiate the operator. The Configuration allows to customize
+        how the operation will be processed by the operator.
+
+        Parameters
+        ----------
+        server : server.DPFServer, optional
+            Server with channel connected to the remote or local instance. When
+            ``None``, attempts to use the global server.
+        """
+        return Operator.default_config(name="fft_approx", server=server)
+
+    @property
+    def inputs(self):
+        """Enables to connect inputs to the operator
+
+        Returns
+        --------
+        inputs : InputsFftApprox
+        """
+        return super().inputs
+
+    @property
+    def outputs(self):
+        """Enables to get outputs of the operator by evaluating it
+
+        Returns
+        --------
+        outputs : OutputsFftApprox
+        """
+        return super().outputs
+
+
+class InputsFftApprox(_Inputs):
+    """Intermediate class used to connect user inputs to
+    fft_approx operator.
+
+    Examples
+    --------
+    >>> from ansys.dpf import core as dpf
+    >>> op = dpf.operators.math.fft_approx()
+    >>> my_time_scoping = dpf.Scoping()
+    >>> op.inputs.time_scoping.connect(my_time_scoping)
+    >>> my_mesh_scoping = dpf.Scoping()
+    >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+    >>> my_entity_to_fit = dpf.FieldsContainer()
+    >>> op.inputs.entity_to_fit.connect(my_entity_to_fit)
+    >>> my_component_number = int()
+    >>> op.inputs.component_number.connect(my_component_number)
+    >>> my_first_derivative = bool()
+    >>> op.inputs.first_derivative.connect(my_first_derivative)
+    >>> my_second_derivative = bool()
+    >>> op.inputs.second_derivative.connect(my_second_derivative)
+    """
+
+    def __init__(self, op: Operator):
+        super().__init__(fft_approx._spec().inputs, op)
+        self._time_scoping = Input(fft_approx._spec().input_pin(0), 0, op, -1)
+        self._inputs.append(self._time_scoping)
+        self._mesh_scoping = Input(fft_approx._spec().input_pin(1), 1, op, -1)
+        self._inputs.append(self._mesh_scoping)
+        self._entity_to_fit = Input(fft_approx._spec().input_pin(2), 2, op, -1)
+        self._inputs.append(self._entity_to_fit)
+        self._component_number = Input(fft_approx._spec().input_pin(3), 3, op, -1)
+        self._inputs.append(self._component_number)
+        self._first_derivative = Input(fft_approx._spec().input_pin(4), 4, op, -1)
+        self._inputs.append(self._first_derivative)
+        self._second_derivative = Input(fft_approx._spec().input_pin(5), 5, op, -1)
+        self._inputs.append(self._second_derivative)
+
+    @property
+    def time_scoping(self):
+        """Allows to connect time_scoping input to the operator.
+
+        A time scoping to rescope / split an input
+        fields container
+
+        Parameters
+        ----------
+        my_time_scoping : Scoping
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.math.fft_approx()
+        >>> op.inputs.time_scoping.connect(my_time_scoping)
+        >>> # or
+        >>> op.inputs.time_scoping(my_time_scoping)
+        """
+        return self._time_scoping
+
+    @property
+    def mesh_scoping(self):
+        """Allows to connect mesh_scoping input to the operator.
+
+        A space (mesh entities) scoping (or
+        scopings container) to rescope / split
+        an input fields container
+
+        Parameters
+        ----------
+        my_mesh_scoping : Scoping or ScopingsContainer
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.math.fft_approx()
+        >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+        >>> # or
+        >>> op.inputs.mesh_scoping(my_mesh_scoping)
+        """
+        return self._mesh_scoping
+
+    @property
+    def entity_to_fit(self):
+        """Allows to connect entity_to_fit input to the operator.
+
+        Parameters
+        ----------
+        my_entity_to_fit : FieldsContainer
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.math.fft_approx()
+        >>> op.inputs.entity_to_fit.connect(my_entity_to_fit)
+        >>> # or
+        >>> op.inputs.entity_to_fit(my_entity_to_fit)
+        """
+        return self._entity_to_fit
+
+    @property
+    def component_number(self):
+        """Allows to connect component_number input to the operator.
+
+        Component number as an int, ex '0' for
+        x-displacement, '1' for
+        y-displacement,...
+
+        Parameters
+        ----------
+        my_component_number : int
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.math.fft_approx()
+        >>> op.inputs.component_number.connect(my_component_number)
+        >>> # or
+        >>> op.inputs.component_number(my_component_number)
+        """
+        return self._component_number
+
+    @property
+    def first_derivative(self):
+        """Allows to connect first_derivative input to the operator.
+
+        Calculate the first derivative? (bool):
+        default is false
+
+        Parameters
+        ----------
+        my_first_derivative : bool
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.math.fft_approx()
+        >>> op.inputs.first_derivative.connect(my_first_derivative)
+        >>> # or
+        >>> op.inputs.first_derivative(my_first_derivative)
+        """
+        return self._first_derivative
+
+    @property
+    def second_derivative(self):
+        """Allows to connect second_derivative input to the operator.
+
+        Calculate the second derivative? (bool):
+        default is false
+
+        Parameters
+        ----------
+        my_second_derivative : bool
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.math.fft_approx()
+        >>> op.inputs.second_derivative.connect(my_second_derivative)
+        >>> # or
+        >>> op.inputs.second_derivative(my_second_derivative)
+        """
+        return self._second_derivative
+
+
+class OutputsFftApprox(_Outputs):
+    """Intermediate class used to get outputs from
+    fft_approx operator.
+
+    Examples
+    --------
+    >>> from ansys.dpf import core as dpf
+    >>> op = dpf.operators.math.fft_approx()
+    >>> # Connect inputs : op.inputs. ...
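+    >>> # A minimal, hypothetical continuation (assumes `my_entity_to_fit`
+    >>> # holds one field per node, with x=time and y=data, as sketched above):
+    >>> # op.inputs.entity_to_fit.connect(my_entity_to_fit)
+    >>> # op.inputs.component_number.connect(0)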
+ >>> result_fitted_entity_y = op.outputs.fitted_entity_y() + >>> result_first_der_dy = op.outputs.first_der_dy() + >>> result_second_der_dy = op.outputs.second_der_dy() + """ + + def __init__(self, op: Operator): + super().__init__(fft_approx._spec().outputs, op) + self._fitted_entity_y = Output(fft_approx._spec().output_pin(0), 0, op) + self._outputs.append(self._fitted_entity_y) + self._first_der_dy = Output(fft_approx._spec().output_pin(1), 1, op) + self._outputs.append(self._first_der_dy) + self._second_der_dy = Output(fft_approx._spec().output_pin(2), 2, op) + self._outputs.append(self._second_der_dy) + + @property + def fitted_entity_y(self): + """Allows to get fitted_entity_y output of the operator + + Returns + ---------- + my_fitted_entity_y : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.fft_approx() + >>> # Connect inputs : op.inputs. ... + >>> result_fitted_entity_y = op.outputs.fitted_entity_y() + """ # noqa: E501 + return self._fitted_entity_y + + @property + def first_der_dy(self): + """Allows to get first_der_dy output of the operator + + Returns + ---------- + my_first_der_dy : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.fft_approx() + >>> # Connect inputs : op.inputs. ... + >>> result_first_der_dy = op.outputs.first_der_dy() + """ # noqa: E501 + return self._first_der_dy + + @property + def second_der_dy(self): + """Allows to get second_der_dy output of the operator + + Returns + ---------- + my_second_der_dy : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.fft_approx() + >>> # Connect inputs : op.inputs. ... + >>> result_second_der_dy = op.outputs.second_der_dy() + """ # noqa: E501 + return self._second_der_dy diff --git a/ansys/dpf/core/operators/math/outer_product.py b/ansys/dpf/core/operators/math/outer_product.py new file mode 100644 index 00000000000..9e2cf2e8882 --- /dev/null +++ b/ansys/dpf/core/operators/math/outer_product.py @@ -0,0 +1,233 @@ +""" +outer_product +============= +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class outer_product(Operator): + """Computes the outer product of two vector fields + + Parameters + ---------- + fieldA : Field or FieldsContainer or float + Field or fields container with only one field + is expected + fieldB : Field or FieldsContainer or float + Field or fields container with only one field + is expected + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.math.outer_product() + + >>> # Make input connections + >>> my_fieldA = dpf.Field() + >>> op.inputs.fieldA.connect(my_fieldA) + >>> my_fieldB = dpf.Field() + >>> op.inputs.fieldB.connect(my_fieldB) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.math.outer_product( + ... fieldA=my_fieldA, + ... fieldB=my_fieldB, + ... 
)
+
+    >>> # Get output data
+    >>> result_field = op.outputs.field()
+    """
+
+    def __init__(self, fieldA=None, fieldB=None, config=None, server=None):
+        super().__init__(name="outer_product", config=config, server=server)
+        self._inputs = InputsOuterProduct(self)
+        self._outputs = OutputsOuterProduct(self)
+        if fieldA is not None:
+            self.inputs.fieldA.connect(fieldA)
+        if fieldB is not None:
+            self.inputs.fieldB.connect(fieldB)
+
+    @staticmethod
+    def _spec():
+        description = """Computes the outer product of two vector fields"""
+        spec = Specification(
+            description=description,
+            map_input_pin_spec={
+                0: PinSpecification(
+                    name="fieldA",
+                    type_names=[
+                        "field",
+                        "fields_container",
+                        "double",
+                        "vector",
+                    ],
+                    optional=False,
+                    document="""Field or fields container with only one field
+        is expected""",
+                ),
+                1: PinSpecification(
+                    name="fieldB",
+                    type_names=[
+                        "field",
+                        "fields_container",
+                        "double",
+                        "vector",
+                    ],
+                    optional=False,
+                    document="""Field or fields container with only one field
+        is expected""",
+                ),
+            },
+            map_output_pin_spec={
+                0: PinSpecification(
+                    name="field",
+                    type_names=["field"],
+                    optional=False,
+                    document="""""",
+                ),
+            },
+        )
+        return spec
+
+    @staticmethod
+    def default_config(server=None):
+        """Returns the default config of the operator.
+
+        This config can then be changed to the user needs and be used to
+        instantiate the operator. The Configuration allows to customize
+        how the operation will be processed by the operator.
+
+        Parameters
+        ----------
+        server : server.DPFServer, optional
+            Server with channel connected to the remote or local instance. When
+            ``None``, attempts to use the global server.
+        """
+        return Operator.default_config(name="outer_product", server=server)
+
+    @property
+    def inputs(self):
+        """Enables to connect inputs to the operator
+
+        Returns
+        --------
+        inputs : InputsOuterProduct
+        """
+        return super().inputs
+
+    @property
+    def outputs(self):
+        """Enables to get outputs of the operator by evaluating it
+
+        Returns
+        --------
+        outputs : OutputsOuterProduct
+        """
+        return super().outputs
+
+
+class InputsOuterProduct(_Inputs):
+    """Intermediate class used to connect user inputs to
+    outer_product operator.
+
+    Examples
+    --------
+    >>> from ansys.dpf import core as dpf
+    >>> op = dpf.operators.math.outer_product()
+    >>> my_fieldA = dpf.Field()
+    >>> op.inputs.fieldA.connect(my_fieldA)
+    >>> my_fieldB = dpf.Field()
+    >>> op.inputs.fieldB.connect(my_fieldB)
+    """
+
+    def __init__(self, op: Operator):
+        super().__init__(outer_product._spec().inputs, op)
+        self._fieldA = Input(outer_product._spec().input_pin(0), 0, op, -1)
+        self._inputs.append(self._fieldA)
+        self._fieldB = Input(outer_product._spec().input_pin(1), 1, op, -1)
+        self._inputs.append(self._fieldB)
+
+    @property
+    def fieldA(self):
+        """Allows to connect fieldA input to the operator.
+
+        Field or fields container with only one field
+        is expected
+
+        Parameters
+        ----------
+        my_fieldA : Field or FieldsContainer or float
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.math.outer_product()
+        >>> op.inputs.fieldA.connect(my_fieldA)
+        >>> # or
+        >>> op.inputs.fieldA(my_fieldA)
+        """
+        return self._fieldA
+
+    @property
+    def fieldB(self):
+        """Allows to connect fieldB input to the operator.
+ + Field or fields container with only one field + is expected + + Parameters + ---------- + my_fieldB : Field or FieldsContainer or float + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.outer_product() + >>> op.inputs.fieldB.connect(my_fieldB) + >>> # or + >>> op.inputs.fieldB(my_fieldB) + """ + return self._fieldB + + +class OutputsOuterProduct(_Outputs): + """Intermediate class used to get outputs from + outer_product operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.outer_product() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() + """ + + def __init__(self, op: Operator): + super().__init__(outer_product._spec().outputs, op) + self._field = Output(outer_product._spec().output_pin(0), 0, op) + self._outputs.append(self._field) + + @property + def field(self): + """Allows to get field output of the operator + + Returns + ---------- + my_field : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.math.outer_product() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() + """ # noqa: E501 + return self._field diff --git a/ansys/dpf/core/operators/math/unit_convert.py b/ansys/dpf/core/operators/math/unit_convert.py index 369a58c6ead..66a14d9755a 100644 --- a/ansys/dpf/core/operators/math/unit_convert.py +++ b/ansys/dpf/core/operators/math/unit_convert.py @@ -18,9 +18,9 @@ class unit_convert(Operator): Parameters ---------- entity_to_convert : Field or FieldsContainer or MeshedRegion or MeshesContainer - unit_name : str + unit_name : str or int Unit as a string, ex 'm' for meter, 'pa' for - pascal,... + pascal,... or ansys unit system's id Examples @@ -77,10 +77,10 @@ def _spec(): ), 1: PinSpecification( name="unit_name", - type_names=["string"], + type_names=["string", "int32"], optional=False, document="""Unit as a string, ex 'm' for meter, 'pa' for - pascal,...""", + pascal,... or ansys unit system's id""", ), }, map_output_pin_spec={ @@ -181,11 +181,11 @@ def unit_name(self): """Allows to connect unit_name input to the operator. Unit as a string, ex 'm' for meter, 'pa' for - pascal,... + pascal,... or ansys unit system's id Parameters ---------- - my_unit_name : str + my_unit_name : str or int Examples -------- diff --git a/ansys/dpf/core/operators/mesh/__init__.py b/ansys/dpf/core/operators/mesh/__init__.py index cc138c04343..938869368b0 100644 --- a/ansys/dpf/core/operators/mesh/__init__.py +++ b/ansys/dpf/core/operators/mesh/__init__.py @@ -4,8 +4,9 @@ from .meshes_provider import meshes_provider from .beam_properties import beam_properties from .split_mesh import split_mesh -from .from_scoping import from_scoping from .split_fields import split_fields +from .change_cs import change_cs +from .from_scoping import from_scoping from .points_from_coordinates import points_from_coordinates from .mesh_clip import mesh_clip from .make_sphere_levelset import make_sphere_levelset diff --git a/ansys/dpf/core/operators/mesh/change_cs.py b/ansys/dpf/core/operators/mesh/change_cs.py new file mode 100644 index 00000000000..b69f2c1ad4d --- /dev/null +++ b/ansys/dpf/core/operators/mesh/change_cs.py @@ -0,0 +1,217 @@ +""" +change_cs +========= +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.outputs import _modify_output_spec_with_one_type +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class change_cs(Operator): + """Apply a transformation (rotation and displacement) matrix on a mesh or + meshes container. + + Parameters + ---------- + meshes : MeshedRegion or MeshesContainer + coordinate_system : Field + 3-3 rotation matrix + 3 translations (x, y, + z) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.mesh.change_cs() + + >>> # Make input connections + >>> my_meshes = dpf.MeshedRegion() + >>> op.inputs.meshes.connect(my_meshes) + >>> my_coordinate_system = dpf.Field() + >>> op.inputs.coordinate_system.connect(my_coordinate_system) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.mesh.change_cs( + ... meshes=my_meshes, + ... coordinate_system=my_coordinate_system, + ... ) + + >>> # Get output data + >>> result_meshed_region = op.outputs.meshed_region() + """ + + def __init__(self, meshes=None, coordinate_system=None, config=None, server=None): + super().__init__(name="mesh::change_cs", config=config, server=server) + self._inputs = InputsChangeCs(self) + self._outputs = OutputsChangeCs(self) + if meshes is not None: + self.inputs.meshes.connect(meshes) + if coordinate_system is not None: + self.inputs.coordinate_system.connect(coordinate_system) + + @staticmethod + def _spec(): + description = """Apply a transformation (rotation and displacement) matrix on a mesh or + meshes container.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="meshes", + type_names=["meshed_region", "meshes_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="coordinate_system", + type_names=["field"], + optional=False, + document="""3-3 rotation matrix + 3 translations (x, y, + z)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="meshed_region", + type_names=["meshed_region", "meshes_container"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the global server. + """ + return Operator.default_config(name="mesh::change_cs", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsChangeCs + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsChangeCs + """ + return super().outputs + + +class InputsChangeCs(_Inputs): + """Intermediate class used to connect user inputs to + change_cs operator. 
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.change_cs() + >>> my_meshes = dpf.MeshedRegion() + >>> op.inputs.meshes.connect(my_meshes) + >>> my_coordinate_system = dpf.Field() + >>> op.inputs.coordinate_system.connect(my_coordinate_system) + """ + + def __init__(self, op: Operator): + super().__init__(change_cs._spec().inputs, op) + self._meshes = Input(change_cs._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._meshes) + self._coordinate_system = Input(change_cs._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._coordinate_system) + + @property + def meshes(self): + """Allows to connect meshes input to the operator. + + Parameters + ---------- + my_meshes : MeshedRegion or MeshesContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.change_cs() + >>> op.inputs.meshes.connect(my_meshes) + >>> # or + >>> op.inputs.meshes(my_meshes) + """ + return self._meshes + + @property + def coordinate_system(self): + """Allows to connect coordinate_system input to the operator. + + 3-3 rotation matrix + 3 translations (x, y, + z) + + Parameters + ---------- + my_coordinate_system : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.change_cs() + >>> op.inputs.coordinate_system.connect(my_coordinate_system) + >>> # or + >>> op.inputs.coordinate_system(my_coordinate_system) + """ + return self._coordinate_system + + +class OutputsChangeCs(_Outputs): + """Intermediate class used to get outputs from + change_cs operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.change_cs() + >>> # Connect inputs : op.inputs. ... + >>> result_meshed_region = op.outputs.meshed_region() + """ + + def __init__(self, op: Operator): + super().__init__(change_cs._spec().outputs, op) + self.meshed_region_as_meshed_region = Output( + _modify_output_spec_with_one_type( + change_cs._spec().output_pin(0), "meshed_region" + ), + 0, + op, + ) + self._outputs.append(self.meshed_region_as_meshed_region) + self.meshed_region_as_meshes_container = Output( + _modify_output_spec_with_one_type( + change_cs._spec().output_pin(0), "meshes_container" + ), + 0, + op, + ) + self._outputs.append(self.meshed_region_as_meshes_container) diff --git a/ansys/dpf/core/operators/mesh/skin.py b/ansys/dpf/core/operators/mesh/skin.py index 654b3bcfe74..b7f21c6d699 100644 --- a/ansys/dpf/core/operators/mesh/skin.py +++ b/ansys/dpf/core/operators/mesh/skin.py @@ -44,6 +44,7 @@ class skin(Operator): >>> result_nodes_mesh_scoping = op.outputs.nodes_mesh_scoping() >>> result_map_new_elements_to_old = op.outputs.map_new_elements_to_old() >>> result_property_field_new_elements_to_old = op.outputs.property_field_new_elements_to_old() + >>> result_facet_indices = op.outputs.facet_indices() """ def __init__(self, mesh=None, mesh_scoping=None, config=None, server=None): @@ -100,7 +101,24 @@ def _spec(): name="property_field_new_elements_to_old", type_names=["property_field"], optional=False, - document="""""", + document="""This property field gives, for each new face + element id (in the scoping) the + corresponding 3d volume element index + (in the data) it has been extracted + from. 
the 3d volume element id can be + found with the element scoping of the + input mesh.""", + ), + 4: PinSpecification( + name="facet_indices", + type_names=["property_field"], + optional=False, + document="""This property field fives, for each new face + element id (in the scoping) the + corresponding face index on the + source 3d volume element. the 3d + volume element can be extracted from + the previous output.""", ), }, ) @@ -214,6 +232,7 @@ class OutputsSkin(_Outputs): >>> result_nodes_mesh_scoping = op.outputs.nodes_mesh_scoping() >>> result_map_new_elements_to_old = op.outputs.map_new_elements_to_old() >>> result_property_field_new_elements_to_old = op.outputs.property_field_new_elements_to_old() + >>> result_facet_indices = op.outputs.facet_indices() """ def __init__(self, op: Operator): @@ -228,6 +247,8 @@ def __init__(self, op: Operator): skin._spec().output_pin(3), 3, op ) self._outputs.append(self._property_field_new_elements_to_old) + self._facet_indices = Output(skin._spec().output_pin(4), 4, op) + self._outputs.append(self._facet_indices) @property def mesh(self): @@ -296,3 +317,20 @@ def property_field_new_elements_to_old(self): >>> result_property_field_new_elements_to_old = op.outputs.property_field_new_elements_to_old() """ # noqa: E501 return self._property_field_new_elements_to_old + + @property + def facet_indices(self): + """Allows to get facet_indices output of the operator + + Returns + ---------- + my_facet_indices : PropertyField + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.mesh.skin() + >>> # Connect inputs : op.inputs. ... + >>> result_facet_indices = op.outputs.facet_indices() + """ # noqa: E501 + return self._facet_indices diff --git a/ansys/dpf/core/operators/metadata/__init__.py b/ansys/dpf/core/operators/metadata/__init__.py index c0e5a0ba921..c3cf7fcf340 100644 --- a/ansys/dpf/core/operators/metadata/__init__.py +++ b/ansys/dpf/core/operators/metadata/__init__.py @@ -7,5 +7,6 @@ from .boundary_condition_provider import boundary_condition_provider from .is_cyclic import is_cyclic from .material_support_provider import material_support_provider +from .property_field_provider_by_name import property_field_provider_by_name from .cyclic_mesh_expansion import cyclic_mesh_expansion from .cyclic_support_provider import cyclic_support_provider diff --git a/ansys/dpf/core/operators/metadata/collection.py b/ansys/dpf/core/operators/metadata/collection.py deleted file mode 100644 index 56448bd5d44..00000000000 --- a/ansys/dpf/core/operators/metadata/collection.py +++ /dev/null @@ -1,588 +0,0 @@ -""" -Collection -=========== -Contains classes associated with the DPF collection. - -""" -import abc -import warnings -import traceback - -import numpy as np - -from ansys.dpf.core.server_types import BaseServer -from ansys.dpf.core.scoping import Scoping -from ansys.dpf.core.time_freq_support import TimeFreqSupport -from ansys.dpf.core import server as server_module -from ansys.dpf.gate import ( - collection_capi, - collection_grpcapi, - label_space_capi, - label_space_grpcapi, - data_processing_capi, - data_processing_grpcapi, - object_handler, - dpf_vector, - dpf_array -) - - -class Collection: - """Represents a collection of entries ordered by labels and IDs. - - Parameters - ---------- - dpf_type : - - collection : ansys.grpc.dpf.collection_pb2.Collection, optional - Collection to create from the collection message. The default is ``None``. 
- server : server.DPFServer, optional - Server with the channel connected to the remote or local instance. The - default is ``None``, in which case an attempt is made to use the global - server. - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> coll = dpf.Collection(dpf.types.field) - - """ - - def __init__(self, collection=None, - server: BaseServer = None): - # step 1: get server - self._server = server_module.get_or_create_server(server) - - # step2: if object exists, take the instance, else create it - self._internal_obj = None - if collection is not None: - if isinstance(collection, Collection): - self._server = collection._server - core_api = self._server.get_api_for_type( - capi=data_processing_capi.DataProcessingCAPI, - grpcapi=data_processing_grpcapi.DataProcessingGRPCAPI - ) - core_api.init_data_processing_environment(self) - self._internal_obj = core_api.data_processing_duplicate_object_reference(collection) - else: - self._internal_obj = collection - self.owned = False - - @property - def _server(self): - return self._server_instance - - @_server.setter - def _server(self, value): - self._server_instance = value - # step 2: get api - self._api = self._server.get_api_for_type(capi=collection_capi.CollectionCAPI, - grpcapi=collection_grpcapi.CollectionGRPCAPI) - # step3: init environment - self._api.init_collection_environment(self) # creates stub when gRPC - - @abc.abstractmethod - def create_subtype(self, obj_by_copy): - pass - - @staticmethod - def integral_collection(inpt, server: BaseServer = None): - """Creates a collection of integral type with a list. - - The collection of integral is the equivalent of an array of - data sent server side. It can be used to efficiently stream - large data to the server. - - Parameters - ---------- - inpt : list[float], list[int], numpy.array - list to transfer server side - - Returns - ------- - IntegralCollection - - Notes - ----- - Used by default by the ``'Operator'`` and the``'Workflow'`` when a - list is connected or returned. - - """ - if isinstance(inpt, np.ndarray): - inpt = inpt.flatten() - if all(isinstance(x, (int, np.int32)) for x in inpt): - return IntCollection(inpt, server=server) - if all(isinstance(x, (float, np.float)) for x in inpt): - return FloatCollection(inpt, server=server) - else: - raise NotImplementedError(f"{IntegralCollection.__name__} is only " - "implemented for int and float values " - f"and not {type(inpt[0]).__name__}") - - def set_labels(self, labels): - """Set labels for scoping the collection. - - Parameters - ---------- - labels : list[str], optional - Labels to scope entries to. For example, ``["time", "complex"]``. - - """ - current_labels = self.labels - if len(current_labels) != 0: - print( - "The collection already has labels :", - current_labels, - "deleting existing labels is not implemented yet.", - ) - return - for label in labels: - self.add_label(label) - - def add_label(self, label, default_value=None): - """Add the requested label to scope the collection. - - Parameters - ---------- - label : str - Labels to scope the entries to. For example, ``"time"``. - - default_value : int, optional - Default value for existing fields in the collection. The default - is ``None``. 
- - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> coll = dpf.FieldsContainer() - >>> coll.add_label('time') - - """ - if default_value is not None: - self._api.collection_add_label_with_default_value(self, label, default_value) - else: - self._api.collection_add_label(self, label) - - def _get_labels(self): - """Retrieve labels scoping the collection. - - Returns - ------- - labels: list[str] - List of labels that entries are scoped to. For example, ``["time", "complex"]``. - """ - num = self._api.collection_get_num_labels(self) - out = [] - for i in range(0, num): - out.append(self._api.collection_get_label(self, i)) - return out - - labels = property(_get_labels, set_labels, "labels") - - def has_label(self, label): - """Check if a collection has a specified label. - - Parameters - ---------- - label: str - Label to search for. For example, ``"time"``. - - Returns - ------- - bool - ``True`` when successful, ``False`` when failed. - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> coll = dpf.FieldsContainer() - >>> coll.add_label('time') - >>> coll.has_label('time') - True - - >>> coll.has_label('complex') - False - - """ - return label in self.labels - - def _get_entries(self, label_space_or_index): - """Retrieve the entries at a requested label space or index. - - Parameters - ---------- - label_space_or_index : dict[str,int] - Label space or index. For example, - ``{"time": 1, "complex": 0}`` or the index of the field. - - Returns - ------- - entries : list[Scoping], list[Field], list[MeshedRegion] - Entries corresponding to the request. - """ - if isinstance(label_space_or_index, dict): - client_label_space = self._create_client_label_space(label_space_or_index) - num = self._api.collection_get_num_obj_for_label_space(self, client_label_space) - out = [] - for i in range(0, num): - out.append(self.create_subtype( - self._api.collection_get_obj_by_index_for_label_space( - self, client_label_space, i))) - return out - else: - return self.create_subtype( - self._api.collection_get_obj_by_index(self, label_space_or_index) - ) - - def _get_entry(self, label_space_or_index): - """Retrieve the entry at a requested label space or index. - - Parameters - ---------- - label_space_or_index : dict[str,int] - Label space or index of the requested entry. For example, - ``{"time": 1, "complex": 0}`` or the index of the field. - - Returns - ------- - entry : Scoping, Field, MeshedRegion - Entry at the requested label space or index. - """ - entries = self._get_entries(label_space_or_index) - if isinstance(entries, list): - if len(entries) == 1: - return entries[0] - elif len(entries) == 0: - return None - else: - raise KeyError(f"{label_space_or_index} has {len(entries)} entries") - else: - return entries - - def get_label_space(self, index): - """Retrieve the label space of an entry at a requested index. - - Parameters - ---------- - index: int - Index of the entry. - - Returns - ------- - label_space : dict(str:int) - Scoping of the requested entry. For example, - ``{"time": 1, "complex": 0}``. - """ - return self._create_dict_from_client_label_space( - self._api.collection_get_obj_label_space_by_index(self, index) - ) - - def get_available_ids_for_label(self, label="time"): - """Retrieve the IDs assigned to an input label. - - Parameters - ---------- - label : str - Name of the input label. The default is ``"time"``. - - Returns - ------- - ids : list[int] - List of IDs assigned to the input label. 
- """ - return self.get_label_scoping(label)._get_ids(False) - - def get_label_scoping(self, label="time"): - """Retrieve the scoping for an input label. - - This method allows you to retrieve a list of IDs for a given input label in the - collection. For example, if the label ``el_type`` exists in the collection, you - can use the `get_lable_scoping` method to retrieve a list of IDS with this label. - You can then use these IDs to request a given entity inside the collection. - - Parameters - ---------- - label: str - Name of the input label. - - Returns - ------- - scoping: Scoping - IDs scoped to the input label. - """ - scoping = Scoping(self._api.collection_get_label_scoping(self, label), server=self._server) - return scoping - - def __getitem__(self, index): - """Retrieves the entry at a requested index value. - - Parameters - ---------- - index : int - Index value. - - Returns - ------- - entry : Field , Scoping - Entry at the index value. - """ - self_len = len(self) - if index < 0: - # convert to a positive index - index = self_len + index - - if not self_len: - raise IndexError("This collection contains no items") - if index >= self_len: - raise IndexError(f"This collection contains only {self_len} entrie(s)") - - return self._get_entries(index) - - @property - def _label_space_api(self): - return self._server.get_api_for_type(capi=label_space_capi.LabelSpaceCAPI, - grpcapi=label_space_grpcapi.LabelSpaceGRPCAPI) - - @property - def _data_processing_core_api(self): - core_api = self._server.get_api_for_type( - capi=data_processing_capi.DataProcessingCAPI, - grpcapi=data_processing_grpcapi.DataProcessingGRPCAPI) - core_api.init_data_processing_environment(self) - return core_api - - def _create_client_label_space(self, label_space): - client_label_space = object_handler.ObjHandler( - self._data_processing_core_api, - self._label_space_api.label_space_new_for_object(self) - ) - for key, id in label_space.items(): - self._label_space_api.label_space_add_data(client_label_space, key, id) - return client_label_space - - def _create_dict_from_client_label_space(self, client_label_space): - if isinstance(client_label_space, dict): - return client_label_space - out = {} - client_label_space = object_handler.ObjHandler( - self._data_processing_core_api, client_label_space - ) - for i in range(0, self._label_space_api.label_space_get_size(client_label_space)): - out[self._label_space_api.label_space_get_labels_name(client_label_space, i)] = \ - self._label_space_api.label_space_get_labels_value(client_label_space, i) - return out - - def _add_entry(self, label_space, entry): - """Update or add an entry at a requested label space. - - parameters - ---------- - label_space : list[str,int] - Label space of the requested fields. For example, ``{"time":1, "complex":0}``. - entry : Field or Scoping - DPF entry to add. - """ - client_label_space = self._create_client_label_space(label_space) - self._api.collection_add_entry(self, client_label_space, entry) - - def _get_time_freq_support(self): - """Retrieve time frequency support. 
- - Returns - ------- - time_freq_support : TimeFreqSupport - """ - from ansys.dpf.gate import support_capi, support_grpcapi, object_handler, \ - data_processing_capi, data_processing_grpcapi - data_api = self._server.get_api_for_type( - capi=data_processing_capi.DataProcessingCAPI, - grpcapi=data_processing_grpcapi.DataProcessingGRPCAPI) - support = object_handler.ObjHandler( - data_processing_api=data_api, - internal_obj=self._api.collection_get_support(self, "time"), - server=self._server) - support_api = self._server.get_api_for_type( - capi=support_capi.SupportCAPI, - grpcapi=support_grpcapi.SupportGRPCAPI - ) - time_freq = support_api.support_get_as_time_freq_support(support) - res = TimeFreqSupport(time_freq_support=time_freq, server=self._server) - return res - - def _set_time_freq_support(self, time_freq_support): - """Set the time frequency support of the collection.""" - self._api.collection_set_support(self, "time", time_freq_support) - - def __str__(self): - """Describe the entity. - - Returns - ------- - description : str - Description of the entity. - """ - from ansys.dpf.core.core import _description - return _description(self._internal_obj, self._server) - - def __len__(self): - """Retrieve the number of entries.""" - return self._api.collection_get_size(self) - - def __del__(self): - """Delete the entry.""" - try: - # delete - if not self.owned: - self._deleter_func[0](self._deleter_func[1](self)) - except: - warnings.warn(traceback.format_exc()) - - def _get_ownership(self): - self.owned = True - return self._internal_obj - - def __iter__(self): - for i in range(len(self)): - yield self[i] - - -class IntegralCollection(Collection): - """Creates a collection of integral type with a list. - - The collection of integral is the equivalent of an array of - data sent server side. It can be used to efficiently stream - large data to the server. - - Parameters - ---------- - list : list[float], list[int], numpy.array - list to transfer server side - - Notes - ----- - Used by default by the ``'Operator'`` and the``'Workflow'`` when a - list is connected or returned. - """ - - def __init__(self, server=None, collection=None): - super().__init__(server=server, collection=collection) - - @abc.abstractmethod - def create_subtype(self, obj_by_copy): - pass - - @abc.abstractmethod - def _set_integral_entries(self, input): - pass - - def get_integral_entries(self): - pass - - -class IntCollection(Collection): - """Creates a collection of integers with a list. - - The collection of integral is the equivalent of an array of - data sent server side. It can be used to efficiently stream - large data to the server. - - Parameters - ---------- - list : list[int], numpy.array - list to transfer server side - - Notes - ----- - Used by default by the ``'Operator'`` and the``'Workflow'`` when a - list is connected or returned. 
- """ - - def __init__(self, list=None, server=None, collection=None): - super().__init__(server=server, collection=collection) - if self._internal_obj is None: - if self._server.has_client(): - self._internal_obj = self._api.collection_of_int_new_on_client(self._server.client) - else: - self._internal_obj = self._api.collection_of_int_new() - if list is not None: - self._set_integral_entries(list) - - def create_subtype(self, obj_by_copy): - return int(obj_by_copy) - - def _set_integral_entries(self, input): - dtype = np.int32 - if isinstance(input, range): - input = np.array(list(input), dtype=dtype) - elif not isinstance(input, (np.ndarray, np.generic)): - input = np.array(input, dtype=dtype) - else: - input = np.array(list(input), dtype=dtype) - - self._api.collection_set_data_as_int(self, input, input.size) - - def get_integral_entries(self): - try: - vec = dpf_vector.DPFVectorInt(client=self._server.client) - self._api.collection_get_data_as_int_for_dpf_vector( - self, vec, vec.internal_data, vec.internal_size - ) - return dpf_array.DPFArray(vec) - except NotImplementedError: - return self._api.collection_get_data_as_int(self, 0) - - -class FloatCollection(Collection): - """Creates a collection of floats (double64) with a list. - - The collection of integral is the equivalent of an array of - data sent server side. It can be used to efficiently stream - large data to the server. - - Parameters - ---------- - list : list[float], numpy.array - list to transfer server side - - Notes - ----- - Used by default by the ``'Operator'`` and the``'Workflow'`` when a - list is connected or returned. - """ - - def __init__(self, list=None, server=None, collection=None): - super().__init__(server=server, collection=collection) - self._sub_type = float - if self._internal_obj is None: - if self._server.has_client(): - self._internal_obj = self._api.collection_of_double_new_on_client( - self._server.client - ) - else: - self._internal_obj = self._api.collection_of_double_new() - if list is not None: - self._set_integral_entries(list) - - def create_subtype(self, obj_by_copy): - return float(obj_by_copy) - - def _set_integral_entries(self, input): - dtype = np.float - if isinstance(input, range): - input = np.array(list(input), dtype=dtype) - elif not isinstance(input, (np.ndarray, np.generic)): - input = np.array(input, dtype=dtype) - else: - input = np.array(list(input), dtype=dtype) - - self._api.collection_set_data_as_double(self, input, input.size) - - def get_integral_entries(self): - try: - vec = dpf_vector.DPFVectorDouble(client=self._server.client) - self._api.collection_get_data_as_double_for_dpf_vector( - self, vec, vec.internal_data, vec.internal_size - ) - return dpf_array.DPFArray(vec) - except NotImplementedError: - return self._api.collection_get_data_as_double(self, 0) diff --git a/ansys/dpf/core/operators/metadata/property_field_provider_by_name.py b/ansys/dpf/core/operators/metadata/property_field_provider_by_name.py new file mode 100644 index 00000000000..5a6ccdf907d --- /dev/null +++ b/ansys/dpf/core/operators/metadata/property_field_provider_by_name.py @@ -0,0 +1,339 @@ +""" +property_field_provider_by_name +=============================== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class property_field_provider_by_name(Operator): + """Provides the property values for a set of elements for a defined + property name. + + Parameters + ---------- + mesh_scoping : Scoping, optional + Scoping that defines the set of elements to + fetch the property values for. if not + specified, applied on all the + elements of the mesh. + streams_container : StreamsContainer, optional + Optional if using a datasources + data_sources : DataSources + Optional if using a streamscontainer + property_name : str + Property to read, that can be the following: + elements_connectivity, + nodes_connectivity, material, + element_type, mapdl_element_type, + harmonic_index, step, substep, + keyopt_i (i = 1 -> 18). + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.metadata.property_field_provider_by_name() + + >>> # Make input connections + >>> my_mesh_scoping = dpf.Scoping() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_property_name = str() + >>> op.inputs.property_name.connect(my_property_name) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.metadata.property_field_provider_by_name( + ... mesh_scoping=my_mesh_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... property_name=my_property_name, + ... ) + + >>> # Get output data + >>> result_property_field = op.outputs.property_field() + """ + + def __init__( + self, + mesh_scoping=None, + streams_container=None, + data_sources=None, + property_name=None, + config=None, + server=None, + ): + super().__init__( + name="property_field_provider_by_name", config=config, server=server + ) + self._inputs = InputsPropertyFieldProviderByName(self) + self._outputs = OutputsPropertyFieldProviderByName(self) + if mesh_scoping is not None: + self.inputs.mesh_scoping.connect(mesh_scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if property_name is not None: + self.inputs.property_name.connect(property_name) + + @staticmethod + def _spec(): + description = """Provides the property values for a set of elements for a defined + property name.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 1: PinSpecification( + name="mesh_scoping", + type_names=["scoping"], + optional=True, + document="""Scoping that defines the set of elements to + fetch the property values for. 
if not
+        specified, applied on all the
+        elements of the mesh.""",
+                ),
+                3: PinSpecification(
+                    name="streams_container",
+                    type_names=["streams_container"],
+                    optional=True,
+                    document="""Optional if using a datasources""",
+                ),
+                4: PinSpecification(
+                    name="data_sources",
+                    type_names=["data_sources"],
+                    optional=False,
+                    document="""Optional if using a streamscontainer""",
+                ),
+                13: PinSpecification(
+                    name="property_name",
+                    type_names=["string"],
+                    optional=False,
+                    document="""Property to read, that can be the following:
+        elements_connectivity,
+        nodes_connectivity, material,
+        element_type, mapdl_element_type,
+        harmonic_index, step, substep,
+        keyopt_i (i = 1 -> 18).""",
+                ),
+            },
+            map_output_pin_spec={
+                0: PinSpecification(
+                    name="property_field",
+                    type_names=["property_field"],
+                    optional=False,
+                    document="""Property field""",
+                ),
+            },
+        )
+        return spec
+
+    @staticmethod
+    def default_config(server=None):
+        """Returns the default config of the operator.
+
+        This config can then be changed to the user needs and be used to
+        instantiate the operator. The Configuration allows to customize
+        how the operation will be processed by the operator.
+
+        Parameters
+        ----------
+        server : server.DPFServer, optional
+            Server with channel connected to the remote or local instance. When
+            ``None``, attempts to use the global server.
+        """
+        return Operator.default_config(
+            name="property_field_provider_by_name", server=server
+        )
+
+    @property
+    def inputs(self):
+        """Enables to connect inputs to the operator
+
+        Returns
+        --------
+        inputs : InputsPropertyFieldProviderByName
+        """
+        return super().inputs
+
+    @property
+    def outputs(self):
+        """Enables to get outputs of the operator by evaluating it
+
+        Returns
+        --------
+        outputs : OutputsPropertyFieldProviderByName
+        """
+        return super().outputs
+
+
+class InputsPropertyFieldProviderByName(_Inputs):
+    """Intermediate class used to connect user inputs to
+    property_field_provider_by_name operator.
+
+    Examples
+    --------
+    >>> from ansys.dpf import core as dpf
+    >>> op = dpf.operators.metadata.property_field_provider_by_name()
+    >>> my_mesh_scoping = dpf.Scoping()
+    >>> op.inputs.mesh_scoping.connect(my_mesh_scoping)
+    >>> my_streams_container = dpf.StreamsContainer()
+    >>> op.inputs.streams_container.connect(my_streams_container)
+    >>> my_data_sources = dpf.DataSources()
+    >>> op.inputs.data_sources.connect(my_data_sources)
+    >>> my_property_name = str()
+    >>> op.inputs.property_name.connect(my_property_name)
+    """
+
+    def __init__(self, op: Operator):
+        super().__init__(property_field_provider_by_name._spec().inputs, op)
+        self._mesh_scoping = Input(
+            property_field_provider_by_name._spec().input_pin(1), 1, op, -1
+        )
+        self._inputs.append(self._mesh_scoping)
+        self._streams_container = Input(
+            property_field_provider_by_name._spec().input_pin(3), 3, op, -1
+        )
+        self._inputs.append(self._streams_container)
+        self._data_sources = Input(
+            property_field_provider_by_name._spec().input_pin(4), 4, op, -1
+        )
+        self._inputs.append(self._data_sources)
+        self._property_name = Input(
+            property_field_provider_by_name._spec().input_pin(13), 13, op, -1
+        )
+        self._inputs.append(self._property_name)
+
+    @property
+    def mesh_scoping(self):
+        """Allows to connect mesh_scoping input to the operator.
+
+        Scoping that defines the set of elements to
+        fetch the property values for. if not
+        specified, applied on all the
+        elements of the mesh.
+ + Parameters + ---------- + my_mesh_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.property_field_provider_by_name() + >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> # or + >>> op.inputs.mesh_scoping(my_mesh_scoping) + """ + return self._mesh_scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Optional if using a datasources + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.property_field_provider_by_name() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. + + Optional if using a streamscontainer + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.property_field_provider_by_name() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def property_name(self): + """Allows to connect property_name input to the operator. + + Property to read, that can be the following: + elements_connectivity, + nodes_connectivity, material, + element_type, mapdl_element_type, + harmonic_index, step, substep, + keyopt_i (i = 1 -> 18). + + Parameters + ---------- + my_property_name : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.property_field_provider_by_name() + >>> op.inputs.property_name.connect(my_property_name) + >>> # or + >>> op.inputs.property_name(my_property_name) + """ + return self._property_name + + +class OutputsPropertyFieldProviderByName(_Outputs): + """Intermediate class used to get outputs from + property_field_provider_by_name operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.property_field_provider_by_name() + >>> # Connect inputs : op.inputs. ... + >>> result_property_field = op.outputs.property_field() + """ + + def __init__(self, op: Operator): + super().__init__(property_field_provider_by_name._spec().outputs, op) + self._property_field = Output( + property_field_provider_by_name._spec().output_pin(0), 0, op + ) + self._outputs.append(self._property_field) + + @property + def property_field(self): + """Allows to get property_field output of the operator + + Returns + ---------- + my_property_field : PropertyField + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.metadata.property_field_provider_by_name() + >>> # Connect inputs : op.inputs. ... 
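+    >>> # A minimal, hypothetical continuation (assumes `my_data_sources`
+    >>> # points to a result file; "material" is one of the documented names):
+    >>> # op.inputs.data_sources.connect(my_data_sources)
+    >>> # op.inputs.property_name.connect("material")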
+ >>> result_property_field = op.outputs.property_field() + """ # noqa: E501 + return self._property_field diff --git a/ansys/dpf/core/operators/min_max/__init__.py b/ansys/dpf/core/operators/min_max/__init__.py index 87650fd3571..da1a12568f4 100644 --- a/ansys/dpf/core/operators/min_max/__init__.py +++ b/ansys/dpf/core/operators/min_max/__init__.py @@ -1,12 +1,12 @@ from .min_max_by_time import min_max_by_time -from .phase_of_max import phase_of_max from .time_of_max_by_entity import time_of_max_by_entity +from .max_over_phase import max_over_phase from .min_max_by_entity import min_max_by_entity from .min_max_over_time_by_entity import min_max_over_time_by_entity from .max_over_time_by_entity import max_over_time_by_entity from .min_over_time_by_entity import min_over_time_by_entity from .time_of_min_by_entity import time_of_min_by_entity -from .max_over_phase import max_over_phase +from .phase_of_max import phase_of_max from .min_max import min_max from .min_max_fc import min_max_fc from .min_max_over_label_fc import min_max_over_label_fc diff --git a/ansys/dpf/core/operators/result/__init__.py b/ansys/dpf/core/operators/result/__init__.py index 3e37a03f164..092550443ad 100644 --- a/ansys/dpf/core/operators/result/__init__.py +++ b/ansys/dpf/core/operators/result/__init__.py @@ -4,15 +4,16 @@ from .normal_contact_force import normal_contact_force from .num_surface_status_changes import num_surface_status_changes from .joint_relative_angular_velocity import joint_relative_angular_velocity -from .elastic_strain_Y import elastic_strain_Y from .nodal_moment import nodal_moment from .elemental_mass import elemental_mass from .heat_flux import heat_flux from .co_energy import co_energy +from .elastic_strain_Y import elastic_strain_Y from .plastic_strain_principal_3 import plastic_strain_principal_3 from .electric_flux_density import electric_flux_density from .plastic_strain_principal_2 import plastic_strain_principal_2 from .normal_contact_moment import normal_contact_moment +from .thermal_strain_XZ import thermal_strain_XZ from .elastic_strain_Z import elastic_strain_Z from .stress import stress from .stress_X import stress_X @@ -51,7 +52,6 @@ from .thermal_strain_Z import thermal_strain_Z from .thermal_strain_XY import thermal_strain_XY from .thermal_strain_YZ import thermal_strain_YZ -from .thermal_strain_XZ import thermal_strain_XZ from .thermal_strain_principal_1 import thermal_strain_principal_1 from .thermal_strain_principal_2 import thermal_strain_principal_2 from .thermal_strain_principal_3 import thermal_strain_principal_3 @@ -123,31 +123,23 @@ from .members_in_compression_not_certified import members_in_compression_not_certified from .members_in_bending_not_certified import members_in_bending_not_certified from .members_in_linear_compression_bending_not_certified import members_in_linear_compression_bending_not_certified +from .erp_radiation_efficiency import erp_radiation_efficiency from .cyclic_expansion import cyclic_expansion from .equivalent_radiated_power import equivalent_radiated_power from .torque import torque -from .recombine_harmonic_indeces_cyclic import recombine_harmonic_indeces_cyclic +from .erp_accumulate_results import erp_accumulate_results from .euler_load_buckling import euler_load_buckling from .cyclic_analytic_usum_max import cyclic_analytic_usum_max from .cyclic_analytic_seqv_max import cyclic_analytic_seqv_max +from .recombine_harmonic_indeces_cyclic import recombine_harmonic_indeces_cyclic from .poynting_vector import poynting_vector from 
.poynting_vector_surface import poynting_vector_surface -from .nodal_to_global import nodal_to_global -from .nodal_averaged_elastic_strains import nodal_averaged_elastic_strains from .add_rigid_body_motion import add_rigid_body_motion -from .nodal_averaged_equivalent_elastic_strain import nodal_averaged_equivalent_elastic_strain +from .nodal_to_global import nodal_to_global from .run import run from .cyclic_expanded_velocity import cyclic_expanded_velocity from .cyclic_expanded_el_strain import cyclic_expanded_el_strain from .cyclic_expanded_temperature import cyclic_expanded_temperature -from .nodal_averaged_thermal_strains import nodal_averaged_thermal_strains -from .nodal_averaged_plastic_strains import nodal_averaged_plastic_strains -from .nodal_averaged_thermal_swelling_strains import nodal_averaged_thermal_swelling_strains -from .nodal_averaged_stresses import nodal_averaged_stresses -from .nodal_averaged_creep_strains import nodal_averaged_creep_strains -from .nodal_averaged_equivalent_thermal_strains import nodal_averaged_equivalent_thermal_strains -from .nodal_averaged_equivalent_plastic_strain import nodal_averaged_equivalent_plastic_strain -from .nodal_averaged_equivalent_creep_strain import nodal_averaged_equivalent_creep_strain from .nmisc import nmisc from .global_to_nodal import global_to_nodal from .euler_nodes import euler_nodes @@ -174,25 +166,24 @@ from .mapdl_material_properties import mapdl_material_properties from .mapdl_section import mapdl_section from .cyclic_expanded_heat_flux import cyclic_expanded_heat_flux -from .migrate_to_h5dpf import migrate_to_h5dpf -from .compute_total_strain_YZ import compute_total_strain_YZ -from .compute_total_strain_1 import compute_total_strain_1 -from .compute_total_strain_Y import compute_total_strain_Y +from .compute_total_strain_YZ import compute_total_strain_YZ +from .compute_total_strain_1 import compute_total_strain_1 +from .compute_total_strain_Y import compute_total_strain_Y from .compute_total_strain import compute_total_strain from .compute_stress import compute_stress -from .compute_total_strain_X import compute_total_strain_X -from .compute_total_strain_Z import compute_total_strain_Z -from .compute_total_strain_XY import compute_total_strain_XY -from .compute_total_strain_XZ import compute_total_strain_XZ -from .compute_total_strain_2 import compute_total_strain_2 -from .compute_total_strain_3 import compute_total_strain_3 -from .compute_stress_X import compute_stress_X -from .compute_stress_Y import compute_stress_Y -from .compute_stress_Z import compute_stress_Z -from .compute_stress_XY import compute_stress_XY -from .compute_stress_1 import compute_stress_1 -from .compute_stress_YZ import compute_stress_YZ -from .compute_stress_XZ import compute_stress_XZ -from .compute_stress_2 import compute_stress_2 -from .compute_stress_3 import compute_stress_3 -from .compute_stress_von_mises import compute_stress_von_mises +from .compute_total_strain_X import compute_total_strain_X +from .compute_total_strain_Z import compute_total_strain_Z +from .compute_total_strain_XY import compute_total_strain_XY +from .compute_total_strain_XZ import compute_total_strain_XZ +from .compute_total_strain_2 import compute_total_strain_2 +from .compute_total_strain_3 import compute_total_strain_3 +from .compute_stress_X import compute_stress_X +from .compute_stress_Y import compute_stress_Y +from .compute_stress_Z import compute_stress_Z +from .compute_stress_XY import compute_stress_XY +from .compute_stress_1 import compute_stress_1 +from 
.compute_stress_YZ import compute_stress_YZ +from .compute_stress_XZ import compute_stress_XZ +from .compute_stress_2 import compute_stress_2 +from .compute_stress_3 import compute_stress_3 +from .compute_stress_von_mises import compute_stress_von_mises diff --git a/ansys/dpf/core/operators/result/accu_eqv_creep_strain.py b/ansys/dpf/core/operators/result/accu_eqv_creep_strain.py index 3c803298b67..a4d1d7c1295 100644 --- a/ansys/dpf/core/operators/result/accu_eqv_creep_strain.py +++ b/ansys/dpf/core/operators/result/accu_eqv_creep_strain.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -358,7 +358,7 @@ def __init__(self, op: Operator): ) self._inputs.append(self._read_cyclic) self._read_beams = Input( - accu_eqv_creep_strain._spec().input_pin(21), 21, op, -1 + accu_eqv_creep_strain._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) diff --git a/ansys/dpf/core/operators/result/accu_eqv_plastic_strain.py b/ansys/dpf/core/operators/result/accu_eqv_plastic_strain.py index ae60c4736fb..2a7390f8cfb 100644 --- a/ansys/dpf/core/operators/result/accu_eqv_plastic_strain.py +++ b/ansys/dpf/core/operators/result/accu_eqv_plastic_strain.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -358,7 +358,7 @@ def __init__(self, op: Operator): ) self._inputs.append(self._read_cyclic) self._read_beams = Input( - accu_eqv_plastic_strain._spec().input_pin(21), 21, op, -1 + accu_eqv_plastic_strain._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) diff --git a/ansys/dpf/core/operators/result/creep_strain_energy_density.py b/ansys/dpf/core/operators/result/creep_strain_energy_density.py index 8f214076044..d098d6d159c 100644 --- a/ansys/dpf/core/operators/result/creep_strain_energy_density.py +++ b/ansys/dpf/core/operators/result/creep_strain_energy_density.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -358,7 +358,7 @@ def __init__(self, op: Operator): ) self._inputs.append(self._read_cyclic) self._read_beams = Input( - creep_strain_energy_density._spec().input_pin(21), 21, op, -1 + creep_strain_energy_density._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) diff --git a/ansys/dpf/core/operators/result/elastic_strain.py b/ansys/dpf/core/operators/result/elastic_strain.py index e3d944cae03..d4973506fc4 100644 --- a/ansys/dpf/core/operators/result/elastic_strain.py +++ b/ansys/dpf/core/operators/result/elastic_strain.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -343,7 +343,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(elastic_strain._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(elastic_strain._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(elastic_strain._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/elastic_strain_X.py b/ansys/dpf/core/operators/result/elastic_strain_X.py 
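A note on the renumbering above, which repeats in every operator file of this result family below: the read_beams pin moves from 21 to 22, while the named read_beams input keeps working unchanged. A minimal before/after sketch, assuming a running DPF server; the named route is taken from the generated classes themselves, and the raw-index route uses the generic Operator.connect seen elsewhere in this codebase:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain()
>>> op.inputs.read_beams.connect(True)  # named input: unaffected by the pin move
>>> op.connect(22, True)  # raw pin index: 22 after this change (was 21)

Only scripts that hard-code the integer pin index need updating.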
index 4d94a381e49..baf7956bcf1 100644 --- a/ansys/dpf/core/operators/result/elastic_strain_X.py +++ b/ansys/dpf/core/operators/result/elastic_strain_X.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -347,7 +347,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(elastic_strain_X._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(elastic_strain_X._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(elastic_strain_X._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/elastic_strain_XY.py b/ansys/dpf/core/operators/result/elastic_strain_XY.py index 3d627b84eee..5aa09edfee4 100644 --- a/ansys/dpf/core/operators/result/elastic_strain_XY.py +++ b/ansys/dpf/core/operators/result/elastic_strain_XY.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -349,7 +349,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(elastic_strain_XY._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(elastic_strain_XY._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(elastic_strain_XY._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/elastic_strain_XZ.py b/ansys/dpf/core/operators/result/elastic_strain_XZ.py index 1108817feb3..a8005312946 100644 --- a/ansys/dpf/core/operators/result/elastic_strain_XZ.py +++ b/ansys/dpf/core/operators/result/elastic_strain_XZ.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -349,7 +349,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(elastic_strain_XZ._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(elastic_strain_XZ._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(elastic_strain_XZ._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/elastic_strain_Y.py b/ansys/dpf/core/operators/result/elastic_strain_Y.py index a432bd23848..b7dc80a5f28 100644 --- a/ansys/dpf/core/operators/result/elastic_strain_Y.py +++ b/ansys/dpf/core/operators/result/elastic_strain_Y.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -347,7 +347,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(elastic_strain_Y._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(elastic_strain_Y._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(elastic_strain_Y._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/elastic_strain_YZ.py b/ansys/dpf/core/operators/result/elastic_strain_YZ.py index 
1be2a75a156..cfa3e7b59fe 100644 --- a/ansys/dpf/core/operators/result/elastic_strain_YZ.py +++ b/ansys/dpf/core/operators/result/elastic_strain_YZ.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -349,7 +349,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(elastic_strain_YZ._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(elastic_strain_YZ._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(elastic_strain_YZ._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/elastic_strain_Z.py b/ansys/dpf/core/operators/result/elastic_strain_Z.py index 8afba839bb7..173901488ce 100644 --- a/ansys/dpf/core/operators/result/elastic_strain_Z.py +++ b/ansys/dpf/core/operators/result/elastic_strain_Z.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -347,7 +347,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(elastic_strain_Z._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(elastic_strain_Z._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(elastic_strain_Z._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/elastic_strain_energy_density.py b/ansys/dpf/core/operators/result/elastic_strain_energy_density.py index 9a5e86c882d..4139616917f 100644 --- a/ansys/dpf/core/operators/result/elastic_strain_energy_density.py +++ b/ansys/dpf/core/operators/result/elastic_strain_energy_density.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -360,7 +360,7 @@ def __init__(self, op: Operator): ) self._inputs.append(self._read_cyclic) self._read_beams = Input( - elastic_strain_energy_density._spec().input_pin(21), 21, op, -1 + elastic_strain_energy_density._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) diff --git a/ansys/dpf/core/operators/result/elastic_strain_principal_1.py b/ansys/dpf/core/operators/result/elastic_strain_principal_1.py index 5719def131c..2932e26054f 100644 --- a/ansys/dpf/core/operators/result/elastic_strain_principal_1.py +++ b/ansys/dpf/core/operators/result/elastic_strain_principal_1.py @@ -230,7 +230,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -353,7 +353,7 @@ def __init__(self, op: Operator): ) self._inputs.append(self._read_cyclic) self._read_beams = Input( - elastic_strain_principal_1._spec().input_pin(21), 21, op, -1 + elastic_strain_principal_1._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) diff --git a/ansys/dpf/core/operators/result/elastic_strain_principal_2.py b/ansys/dpf/core/operators/result/elastic_strain_principal_2.py index e68b9a87ba9..3bfea065666 100644 --- a/ansys/dpf/core/operators/result/elastic_strain_principal_2.py +++ b/ansys/dpf/core/operators/result/elastic_strain_principal_2.py @@ -230,7 
+230,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -353,7 +353,7 @@ def __init__(self, op: Operator): ) self._inputs.append(self._read_cyclic) self._read_beams = Input( - elastic_strain_principal_2._spec().input_pin(21), 21, op, -1 + elastic_strain_principal_2._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) diff --git a/ansys/dpf/core/operators/result/elastic_strain_principal_3.py b/ansys/dpf/core/operators/result/elastic_strain_principal_3.py index a1e24d5f232..cf2219ef0fa 100644 --- a/ansys/dpf/core/operators/result/elastic_strain_principal_3.py +++ b/ansys/dpf/core/operators/result/elastic_strain_principal_3.py @@ -230,7 +230,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -353,7 +353,7 @@ def __init__(self, op: Operator): ) self._inputs.append(self._read_cyclic) self._read_beams = Input( - elastic_strain_principal_3._spec().input_pin(21), 21, op, -1 + elastic_strain_principal_3._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) diff --git a/ansys/dpf/core/operators/result/electric_field.py b/ansys/dpf/core/operators/result/electric_field.py index 8fae0106002..0d431345f86 100644 --- a/ansys/dpf/core/operators/result/electric_field.py +++ b/ansys/dpf/core/operators/result/electric_field.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -343,7 +343,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(electric_field._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(electric_field._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(electric_field._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/electric_flux_density.py b/ansys/dpf/core/operators/result/electric_flux_density.py index cc1d252d816..57d98d30788 100644 --- a/ansys/dpf/core/operators/result/electric_flux_density.py +++ b/ansys/dpf/core/operators/result/electric_flux_density.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -358,7 +358,7 @@ def __init__(self, op: Operator): ) self._inputs.append(self._read_cyclic) self._read_beams = Input( - electric_flux_density._spec().input_pin(21), 21, op, -1 + electric_flux_density._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) diff --git a/ansys/dpf/core/operators/result/element_nodal_forces.py b/ansys/dpf/core/operators/result/element_nodal_forces.py index 6c1255dac00..8b2cb600a41 100644 --- a/ansys/dpf/core/operators/result/element_nodal_forces.py +++ b/ansys/dpf/core/operators/result/element_nodal_forces.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -351,7 +351,7 @@ def __init__(self, op: Operator): element_nodal_forces._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = 
Input(element_nodal_forces._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(element_nodal_forces._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/element_orientations.py b/ansys/dpf/core/operators/result/element_orientations.py index 77cf7b0dbd1..a4a8c8cfa45 100644 --- a/ansys/dpf/core/operators/result/element_orientations.py +++ b/ansys/dpf/core/operators/result/element_orientations.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -351,7 +351,7 @@ def __init__(self, op: Operator): element_orientations._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(element_orientations._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(element_orientations._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/equivalent_radiated_power.py b/ansys/dpf/core/operators/result/equivalent_radiated_power.py index 95e92b2d54d..6b24e6da880 100644 --- a/ansys/dpf/core/operators/result/equivalent_radiated_power.py +++ b/ansys/dpf/core/operators/result/equivalent_radiated_power.py @@ -16,13 +16,32 @@ class equivalent_radiated_power(Operator): Parameters ---------- fields_container : FieldsContainer - abstract_meshed_region : MeshedRegion or MeshesContainer, optional - The mesh region in this pin have to be + mesh : MeshedRegion or MeshesContainer + The mesh region in this pin has to be boundary or skin mesh - time_scoping : int or Scoping, optional + time_scoping : int or Scoping Load step number (if it's specified, the erp is computed only on the substeps of this step) or time scoping + mass_density : float + Mass density (if it's not specified, default + value of the air is applied). + speed_of_sound : float + Speed of sound (if it's not specified, + default value of the speed of sound + in the air is applied). + erp_type : int + If this pin is set to 0, the classical erp is + computed, 1 the corrected erp is + computed (a mesh of one face has to + be given in the pin 1) and 2 the + enhanced erp is computed. default is + 0. + boolean : bool + If this pin is set to true, the erp level in + db is computed + factor : float + Erp reference value. default is 1e-12 Examples @@ -35,16 +54,31 @@ class equivalent_radiated_power(Operator): >>> # Make input connections >>> my_fields_container = dpf.FieldsContainer() >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_abstract_meshed_region = dpf.MeshedRegion() - >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) >>> my_time_scoping = int() >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mass_density = float() + >>> op.inputs.mass_density.connect(my_mass_density) + >>> my_speed_of_sound = float() + >>> op.inputs.speed_of_sound.connect(my_speed_of_sound) + >>> my_erp_type = int() + >>> op.inputs.erp_type.connect(my_erp_type) + >>> my_boolean = bool() + >>> op.inputs.boolean.connect(my_boolean) + >>> my_factor = float() + >>> op.inputs.factor.connect(my_factor) >>> # Instantiate operator and connect inputs in one line >>> op = dpf.operators.result.equivalent_radiated_power( ... fields_container=my_fields_container, - ... abstract_meshed_region=my_abstract_meshed_region, + ... 
mesh=my_mesh, ... time_scoping=my_time_scoping, + ... mass_density=my_mass_density, + ... speed_of_sound=my_speed_of_sound, + ... erp_type=my_erp_type, + ... boolean=my_boolean, + ... factor=my_factor, ... ) >>> # Get output data @@ -54,8 +88,13 @@ class equivalent_radiated_power(Operator): def __init__( self, fields_container=None, - abstract_meshed_region=None, + mesh=None, time_scoping=None, + mass_density=None, + speed_of_sound=None, + erp_type=None, + boolean=None, + factor=None, config=None, server=None, ): @@ -64,10 +103,20 @@ def __init__( self._outputs = OutputsEquivalentRadiatedPower(self) if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if abstract_meshed_region is not None: - self.inputs.abstract_meshed_region.connect(abstract_meshed_region) + if mesh is not None: + self.inputs.mesh.connect(mesh) if time_scoping is not None: self.inputs.time_scoping.connect(time_scoping) + if mass_density is not None: + self.inputs.mass_density.connect(mass_density) + if speed_of_sound is not None: + self.inputs.speed_of_sound.connect(speed_of_sound) + if erp_type is not None: + self.inputs.erp_type.connect(erp_type) + if boolean is not None: + self.inputs.boolean.connect(boolean) + if factor is not None: + self.inputs.factor.connect(factor) @staticmethod def _spec(): @@ -82,20 +131,59 @@ def _spec(): document="""""", ), 1: PinSpecification( - name="abstract_meshed_region", + name="mesh", type_names=["abstract_meshed_region", "meshes_container"], - optional=True, - document="""The mesh region in this pin have to be + optional=False, + document="""The mesh region in this pin has to be boundary or skin mesh""", ), 2: PinSpecification( name="time_scoping", type_names=["int32", "vector", "scoping"], - optional=True, + optional=False, document="""Load step number (if it's specified, the erp is computed only on the substeps of this step) or time scoping""", ), + 3: PinSpecification( + name="mass_density", + type_names=["double"], + optional=False, + document="""Mass density (if it's not specified, default + value of the air is applied).""", + ), + 4: PinSpecification( + name="speed_of_sound", + type_names=["double"], + optional=False, + document="""Speed of sound (if it's not specified, + default value of the speed of sound + in the air is applied).""", + ), + 5: PinSpecification( + name="erp_type", + type_names=["int32"], + optional=False, + document="""If this pin is set to 0, the classical erp is + computed, 1 the corrected erp is + computed (a mesh of one face has to + be given in the pin 1) and 2 the + enhanced erp is computed. default is + 0.""", + ), + 6: PinSpecification( + name="boolean", + type_names=["bool"], + optional=False, + document="""If this pin is set to true, the erp level in + db is computed""", + ), + 7: PinSpecification( + name="factor", + type_names=["double"], + optional=False, + document="""Erp reference value. 
default is 1e-12""", + ), }, map_output_pin_spec={ 0: PinSpecification( @@ -155,10 +243,20 @@ class InputsEquivalentRadiatedPower(_Inputs): >>> op = dpf.operators.result.equivalent_radiated_power() >>> my_fields_container = dpf.FieldsContainer() >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_abstract_meshed_region = dpf.MeshedRegion() - >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) >>> my_time_scoping = int() >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mass_density = float() + >>> op.inputs.mass_density.connect(my_mass_density) + >>> my_speed_of_sound = float() + >>> op.inputs.speed_of_sound.connect(my_speed_of_sound) + >>> my_erp_type = int() + >>> op.inputs.erp_type.connect(my_erp_type) + >>> my_boolean = bool() + >>> op.inputs.boolean.connect(my_boolean) + >>> my_factor = float() + >>> op.inputs.factor.connect(my_factor) """ def __init__(self, op: Operator): @@ -167,14 +265,28 @@ def __init__(self, op: Operator): equivalent_radiated_power._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._abstract_meshed_region = Input( - equivalent_radiated_power._spec().input_pin(1), 1, op, -1 - ) - self._inputs.append(self._abstract_meshed_region) + self._mesh = Input(equivalent_radiated_power._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._mesh) self._time_scoping = Input( equivalent_radiated_power._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._time_scoping) + self._mass_density = Input( + equivalent_radiated_power._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._mass_density) + self._speed_of_sound = Input( + equivalent_radiated_power._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._speed_of_sound) + self._erp_type = Input( + equivalent_radiated_power._spec().input_pin(5), 5, op, -1 + ) + self._inputs.append(self._erp_type) + self._boolean = Input(equivalent_radiated_power._spec().input_pin(6), 6, op, -1) + self._inputs.append(self._boolean) + self._factor = Input(equivalent_radiated_power._spec().input_pin(7), 7, op, -1) + self._inputs.append(self._factor) @property def fields_container(self): @@ -195,25 +307,25 @@ def fields_container(self): return self._fields_container @property - def abstract_meshed_region(self): - """Allows to connect abstract_meshed_region input to the operator. + def mesh(self): + """Allows to connect mesh input to the operator. - The mesh region in this pin have to be + The mesh region in this pin has to be boundary or skin mesh Parameters ---------- - my_abstract_meshed_region : MeshedRegion or MeshesContainer + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf >>> op = dpf.operators.result.equivalent_radiated_power() - >>> op.inputs.abstract_meshed_region.connect(my_abstract_meshed_region) + >>> op.inputs.mesh.connect(my_mesh) >>> # or - >>> op.inputs.abstract_meshed_region(my_abstract_meshed_region) + >>> op.inputs.mesh(my_mesh) """ - return self._abstract_meshed_region + return self._mesh @property def time_scoping(self): @@ -237,6 +349,115 @@ def time_scoping(self): """ return self._time_scoping + @property + def mass_density(self): + """Allows to connect mass_density input to the operator. + + Mass density (if it's not specified, default + value of the air is applied). 
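Taken together, the new pins make it possible to request an ERP level in decibels straight from the operator. A hedged usage sketch built only from the named inputs defined above; my_fields_container and my_skin_mesh are hypothetical objects assumed to be prepared upstream, and the air values are illustrative rather than defaults read from this source:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.equivalent_radiated_power(
...     fields_container=my_fields_container,  # hypothetical: fields from upstream
...     mesh=my_skin_mesh,  # hypothetical: must be a boundary or skin mesh (pin 1)
...     mass_density=1.225,  # illustrative air density, kg/m^3
...     speed_of_sound=343.0,  # illustrative speed of sound in air, m/s
...     erp_type=2,  # 0 classical, 1 corrected, 2 enhanced (per the pin 5 doc)
...     boolean=True,  # request the ERP level in dB
...     factor=1e-12,  # documented default reference value
... )
>>> result = op.outputs.fields_container()  # fields-container output, as in this family's doctests

The dB conversion is not spelled out in the source, but factor defaulting to 1e-12 suggests the conventional sound-power level formula, level = 10 * log10(ERP / factor), with the standard 1e-12 W reference.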
+ + Parameters + ---------- + my_mass_density : float + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.equivalent_radiated_power() + >>> op.inputs.mass_density.connect(my_mass_density) + >>> # or + >>> op.inputs.mass_density(my_mass_density) + """ + return self._mass_density + + @property + def speed_of_sound(self): + """Allows to connect speed_of_sound input to the operator. + + Speed of sound (if it's not specified, + default value of the speed of sound + in the air is applied). + + Parameters + ---------- + my_speed_of_sound : float + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.equivalent_radiated_power() + >>> op.inputs.speed_of_sound.connect(my_speed_of_sound) + >>> # or + >>> op.inputs.speed_of_sound(my_speed_of_sound) + """ + return self._speed_of_sound + + @property + def erp_type(self): + """Allows to connect erp_type input to the operator. + + If this pin is set to 0, the classical erp is + computed, 1 the corrected erp is + computed (a mesh of one face has to + be given in the pin 1) and 2 the + enhanced erp is computed. default is + 0. + + Parameters + ---------- + my_erp_type : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.equivalent_radiated_power() + >>> op.inputs.erp_type.connect(my_erp_type) + >>> # or + >>> op.inputs.erp_type(my_erp_type) + """ + return self._erp_type + + @property + def boolean(self): + """Allows to connect boolean input to the operator. + + If this pin is set to true, the erp level in + db is computed + + Parameters + ---------- + my_boolean : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.equivalent_radiated_power() + >>> op.inputs.boolean.connect(my_boolean) + >>> # or + >>> op.inputs.boolean(my_boolean) + """ + return self._boolean + + @property + def factor(self): + """Allows to connect factor input to the operator. + + Erp reference value. 
default is 1e-12 + + Parameters + ---------- + my_factor : float + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.equivalent_radiated_power() + >>> op.inputs.factor.connect(my_factor) + >>> # or + >>> op.inputs.factor(my_factor) + """ + return self._factor + class OutputsEquivalentRadiatedPower(_Outputs): """Intermediate class used to get outputs from diff --git a/ansys/dpf/core/operators/result/eqv_stress_parameter.py b/ansys/dpf/core/operators/result/eqv_stress_parameter.py index f81905083ef..c92bb191e0e 100644 --- a/ansys/dpf/core/operators/result/eqv_stress_parameter.py +++ b/ansys/dpf/core/operators/result/eqv_stress_parameter.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -351,7 +351,7 @@ def __init__(self, op: Operator): eqv_stress_parameter._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(eqv_stress_parameter._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(eqv_stress_parameter._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/erp_accumulate_results.py b/ansys/dpf/core/operators/result/erp_accumulate_results.py new file mode 100644 index 00000000000..41e072c2571 --- /dev/null +++ b/ansys/dpf/core/operators/result/erp_accumulate_results.py @@ -0,0 +1,494 @@ +""" +erp_accumulate_results +====================== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class erp_accumulate_results(Operator): + """Compute the Equivalent Radiated Power (ERP) by panels and sum over the + panels + + Parameters + ---------- + fields_container : FieldsContainer + mesh : MeshedRegion or MeshesContainer + The meshes region in this pin has to be + boundary or skin mesh + time_scoping : int or Scoping + Load step number (if it's specified, the erp + is computed only on the substeps of + this step) or time scoping + mass_density : float + Mass density (if it's not specified, default + value of the air is applied). + speed_of_sound : float + Speed of sound (if it's not specified, + default value of the speed of sound + in the air is applied). + erp_type : int + If this pin is set to 0, the classical erp is + computed, 1 the corrected erp is + computed (a mesh of one face has to + be given in the pin 1) and 2 the + enhanced erp is computed. default is + 0. + boolean : bool + If this pin is set to true, the erp level in + db is computed + factor : float + Erp reference value. 
default is 1e-12 + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.result.erp_accumulate_results() + + >>> # Make input connections + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_time_scoping = int() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mass_density = float() + >>> op.inputs.mass_density.connect(my_mass_density) + >>> my_speed_of_sound = float() + >>> op.inputs.speed_of_sound.connect(my_speed_of_sound) + >>> my_erp_type = int() + >>> op.inputs.erp_type.connect(my_erp_type) + >>> my_boolean = bool() + >>> op.inputs.boolean.connect(my_boolean) + >>> my_factor = float() + >>> op.inputs.factor.connect(my_factor) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.result.erp_accumulate_results( + ... fields_container=my_fields_container, + ... mesh=my_mesh, + ... time_scoping=my_time_scoping, + ... mass_density=my_mass_density, + ... speed_of_sound=my_speed_of_sound, + ... erp_type=my_erp_type, + ... boolean=my_boolean, + ... factor=my_factor, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__( + self, + fields_container=None, + mesh=None, + time_scoping=None, + mass_density=None, + speed_of_sound=None, + erp_type=None, + boolean=None, + factor=None, + config=None, + server=None, + ): + super().__init__(name="erp_accumulate_results", config=config, server=server) + self._inputs = InputsErpAccumulateResults(self) + self._outputs = OutputsErpAccumulateResults(self) + if fields_container is not None: + self.inputs.fields_container.connect(fields_container) + if mesh is not None: + self.inputs.mesh.connect(mesh) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if mass_density is not None: + self.inputs.mass_density.connect(mass_density) + if speed_of_sound is not None: + self.inputs.speed_of_sound.connect(speed_of_sound) + if erp_type is not None: + self.inputs.erp_type.connect(erp_type) + if boolean is not None: + self.inputs.boolean.connect(boolean) + if factor is not None: + self.inputs.factor.connect(factor) + + @staticmethod + def _spec(): + description = """Compute the Equivalent Radiated Power (ERP) by panels and sum over the + panels""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=False, + document="""The meshes region in this pin has to be + boundary or skin mesh""", + ), + 2: PinSpecification( + name="time_scoping", + type_names=["int32", "vector", "scoping"], + optional=False, + document="""Load step number (if it's specified, the erp + is computed only on the substeps of + this step) or time scoping""", + ), + 3: PinSpecification( + name="mass_density", + type_names=["double"], + optional=False, + document="""Mass density (if it's not specified, default + value of the air is applied).""", + ), + 4: PinSpecification( + name="speed_of_sound", + type_names=["double"], + optional=False, + document="""Speed of sound (if it's not specified, + default value of the speed of sound + in the air is applied).""", + ), + 5: PinSpecification( + name="erp_type", + type_names=["int32"], + 
optional=False, + document="""If this pin is set to 0, the classical erp is + computed, 1 the corrected erp is + computed (a mesh of one face has to + be given in the pin 1) and 2 the + enhanced erp is computed. default is + 0.""", + ), + 6: PinSpecification( + name="boolean", + type_names=["bool"], + optional=False, + document="""If this pin is set to true, the erp level in + db is computed""", + ), + 7: PinSpecification( + name="factor", + type_names=["double"], + optional=False, + document="""Erp reference value. default is 1e-12""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the global server. + """ + return Operator.default_config(name="erp_accumulate_results", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsErpAccumulateResults + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluating it + + Returns + -------- + outputs : OutputsErpAccumulateResults + """ + return super().outputs + + +class InputsErpAccumulateResults(_Inputs): + """Intermediate class used to connect user inputs to + erp_accumulate_results operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.erp_accumulate_results() + >>> my_fields_container = dpf.FieldsContainer() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_time_scoping = int() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mass_density = float() + >>> op.inputs.mass_density.connect(my_mass_density) + >>> my_speed_of_sound = float() + >>> op.inputs.speed_of_sound.connect(my_speed_of_sound) + >>> my_erp_type = int() + >>> op.inputs.erp_type.connect(my_erp_type) + >>> my_boolean = bool() + >>> op.inputs.boolean.connect(my_boolean) + >>> my_factor = float() + >>> op.inputs.factor.connect(my_factor) + """ + + def __init__(self, op: Operator): + super().__init__(erp_accumulate_results._spec().inputs, op) + self._fields_container = Input( + erp_accumulate_results._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._fields_container) + self._mesh = Input(erp_accumulate_results._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._mesh) + self._time_scoping = Input( + erp_accumulate_results._spec().input_pin(2), 2, op, -1 + ) + self._inputs.append(self._time_scoping) + self._mass_density = Input( + erp_accumulate_results._spec().input_pin(3), 3, op, -1 + ) + self._inputs.append(self._mass_density) + self._speed_of_sound = Input( + erp_accumulate_results._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._speed_of_sound) + self._erp_type = Input(erp_accumulate_results._spec().input_pin(5), 5, op, -1) + self._inputs.append(self._erp_type) + self._boolean = Input(erp_accumulate_results._spec().input_pin(6), 6, op, -1) + self._inputs.append(self._boolean) +
self._factor = Input(erp_accumulate_results._spec().input_pin(7), 7, op, -1) + self._inputs.append(self._factor) + + @property + def fields_container(self): + """Allows to connect fields_container input to the operator. + + Parameters + ---------- + my_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.erp_accumulate_results() + >>> op.inputs.fields_container.connect(my_fields_container) + >>> # or + >>> op.inputs.fields_container(my_fields_container) + """ + return self._fields_container + + @property + def mesh(self): + """Allows to connect mesh input to the operator. + + The meshes region in this pin has to be + boundary or skin mesh + + Parameters + ---------- + my_mesh : MeshedRegion or MeshesContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.erp_accumulate_results() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Load step number (if it's specified, the erp + is computed only on the substeps of + this step) or time scoping + + Parameters + ---------- + my_time_scoping : int or Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.erp_accumulate_results() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def mass_density(self): + """Allows to connect mass_density input to the operator. + + Mass density (if it's not specified, default + value of the air is applied). + + Parameters + ---------- + my_mass_density : float + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.erp_accumulate_results() + >>> op.inputs.mass_density.connect(my_mass_density) + >>> # or + >>> op.inputs.mass_density(my_mass_density) + """ + return self._mass_density + + @property + def speed_of_sound(self): + """Allows to connect speed_of_sound input to the operator. + + Speed of sound (if it's not specified, + default value of the speed of sound + in the air is applied). + + Parameters + ---------- + my_speed_of_sound : float + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.erp_accumulate_results() + >>> op.inputs.speed_of_sound.connect(my_speed_of_sound) + >>> # or + >>> op.inputs.speed_of_sound(my_speed_of_sound) + """ + return self._speed_of_sound + + @property + def erp_type(self): + """Allows to connect erp_type input to the operator. + + If this pin is set to 0, the classical erp is + computed, 1 the corrected erp is + computed (a mesh of one face has to + be given in the pin 1) and 2 the + enhanced erp is computed. default is + 0. + + Parameters + ---------- + my_erp_type : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.erp_accumulate_results() + >>> op.inputs.erp_type.connect(my_erp_type) + >>> # or + >>> op.inputs.erp_type(my_erp_type) + """ + return self._erp_type + + @property + def boolean(self): + """Allows to connect boolean input to the operator. 
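A note on this new operator as a whole: its input spec is identical to equivalent_radiated_power, but because it sums the ERP over the panels its single output is a Field rather than a FieldsContainer. A minimal sketch with the same hypothetical upstream objects as in the earlier example:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.erp_accumulate_results(
...     fields_container=my_fields_container,  # hypothetical, prepared upstream
...     mesh=my_skin_mesh,  # hypothetical boundary or skin mesh
... )
>>> total_erp = op.outputs.field()  # one summed result, not a per-panel container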
+ + If this pin is set to true, the erp level in + db is computed + + Parameters + ---------- + my_boolean : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.erp_accumulate_results() + >>> op.inputs.boolean.connect(my_boolean) + >>> # or + >>> op.inputs.boolean(my_boolean) + """ + return self._boolean + + @property + def factor(self): + """Allows to connect factor input to the operator. + + Erp reference value. default is 1e-12 + + Parameters + ---------- + my_factor : float + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.erp_accumulate_results() + >>> op.inputs.factor.connect(my_factor) + >>> # or + >>> op.inputs.factor(my_factor) + """ + return self._factor + + +class OutputsErpAccumulateResults(_Outputs): + """Intermediate class used to get outputs from + erp_accumulate_results operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.erp_accumulate_results() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() + """ + + def __init__(self, op: Operator): + super().__init__(erp_accumulate_results._spec().outputs, op) + self._field = Output(erp_accumulate_results._spec().output_pin(0), 0, op) + self._outputs.append(self._field) + + @property + def field(self): + """Allows to get field output of the operator + + Returns + ---------- + my_field : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.result.erp_accumulate_results() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() + """ # noqa: E501 + return self._field diff --git a/ansys/dpf/core/operators/result/nodal_averaged_stresses.py b/ansys/dpf/core/operators/result/erp_radiation_efficiency.py similarity index 50% rename from ansys/dpf/core/operators/result/nodal_averaged_stresses.py rename to ansys/dpf/core/operators/result/erp_radiation_efficiency.py index 6c07043fb2b..794b6579545 100644 --- a/ansys/dpf/core/operators/result/nodal_averaged_stresses.py +++ b/ansys/dpf/core/operators/result/erp_radiation_efficiency.py @@ -1,6 +1,6 @@ """ -nodal_averaged_stresses -======================= +erp_radiation_efficiency +======================== Autogenerated DPF operator classes. """ from warnings import warn @@ -10,21 +10,27 @@ from ansys.dpf.core.operators.specification import PinSpecification, Specification -class nodal_averaged_stresses(Operator): - """Read nodal averaged stresses as averaged nodal result from rst file. +class erp_radiation_efficiency(Operator): + """Compute the radiation efficiency (enhanced erp divided by classical + erp) Parameters ---------- - time_scoping : Scoping, optional - mesh_scoping : ScopingsContainer or Scoping, optional - fields_container : FieldsContainer, optional - Fieldscontainer already allocated modified - inplace - streams_container : StreamsContainer or Stream, optional - Streams containing the result file. - data_sources : DataSources - Data sources containing the result file. - mesh : MeshedRegion, optional + fields_container : FieldsContainer + mesh : MeshedRegion or MeshesContainer + The meshes region in this pin has to be + boundary or skin mesh + time_scoping : int or Scoping + Load step number (if it's specified, the erp + is computed only on the substeps of + this step) or time scoping + mass_density : float + Mass density (if it's not specified, default + value of the air is applied). 
+ speed_of_sound : float + Speed of sound (if it's not specified, + default value of the speed of sound + in the air is applied). Examples @@ -32,30 +38,27 @@ class nodal_averaged_stresses(Operator): >>> from ansys.dpf import core as dpf >>> # Instantiate operator - >>> op = dpf.operators.result.nodal_averaged_stresses() + >>> op = dpf.operators.result.erp_radiation_efficiency() >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) >>> my_fields_container = dpf.FieldsContainer() >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) >>> my_mesh = dpf.MeshedRegion() >>> op.inputs.mesh.connect(my_mesh) + >>> my_time_scoping = int() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mass_density = float() + >>> op.inputs.mass_density.connect(my_mass_density) + >>> my_speed_of_sound = float() + >>> op.inputs.speed_of_sound.connect(my_speed_of_sound) >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.nodal_averaged_stresses( - ... time_scoping=my_time_scoping, - ... mesh_scoping=my_mesh_scoping, + >>> op = dpf.operators.result.erp_radiation_efficiency( ... fields_container=my_fields_container, - ... streams_container=my_streams_container, - ... data_sources=my_data_sources, ... mesh=my_mesh, + ... time_scoping=my_time_scoping, + ... mass_density=my_mass_density, + ... speed_of_sound=my_speed_of_sound, ... ) >>> # Get output data @@ -64,75 +67,70 @@ class nodal_averaged_stresses(Operator): def __init__( self, - time_scoping=None, - mesh_scoping=None, fields_container=None, - streams_container=None, - data_sources=None, mesh=None, + time_scoping=None, + mass_density=None, + speed_of_sound=None, config=None, server=None, ): - super().__init__(name="mapdl::rst::NS", config=config, server=server) - self._inputs = InputsNodalAveragedStresses(self) - self._outputs = OutputsNodalAveragedStresses(self) - if time_scoping is not None: - self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping is not None: - self.inputs.mesh_scoping.connect(mesh_scoping) + super().__init__(name="erp_radiation_efficiency", config=config, server=server) + self._inputs = InputsErpRadiationEfficiency(self) + self._outputs = OutputsErpRadiationEfficiency(self) if fields_container is not None: self.inputs.fields_container.connect(fields_container) - if streams_container is not None: - self.inputs.streams_container.connect(streams_container) - if data_sources is not None: - self.inputs.data_sources.connect(data_sources) if mesh is not None: self.inputs.mesh.connect(mesh) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if mass_density is not None: + self.inputs.mass_density.connect(mass_density) + if speed_of_sound is not None: + self.inputs.speed_of_sound.connect(speed_of_sound) @staticmethod def _spec(): - description = ( - """Read nodal averaged stresses as averaged nodal result from rst file.""" - ) + description = """Compute the radiation efficiency (enhanced erp divided by classical + erp)""" spec = Specification( description=description, map_input_pin_spec={ 0: PinSpecification( - name="time_scoping", - type_names=["scoping", "vector"], - optional=True, 
+ name="fields_container", + type_names=["fields_container"], + optional=False, document="""""", ), 1: PinSpecification( - name="mesh_scoping", - type_names=["scopings_container", "scoping", "vector"], - optional=True, - document="""""", + name="mesh", + type_names=["abstract_meshed_region", "meshes_container"], + optional=False, + document="""The meshes region in this pin has to be + boundary or skin mesh""", ), 2: PinSpecification( - name="fields_container", - type_names=["fields_container"], - optional=True, - document="""Fieldscontainer already allocated modified - inplace""", + name="time_scoping", + type_names=["int32", "vector", "scoping"], + optional=False, + document="""Load step number (if it's specified, the erp + is computed only on the substeps of + this step) or time scoping""", ), 3: PinSpecification( - name="streams_container", - type_names=["streams_container", "stream"], - optional=True, - document="""Streams containing the result file.""", + name="mass_density", + type_names=["double"], + optional=False, + document="""Mass density (if it's not specified, default + value of the air is applied).""", ), 4: PinSpecification( - name="data_sources", - type_names=["data_sources"], + name="speed_of_sound", + type_names=["double"], optional=False, - document="""Data sources containing the result file.""", - ), - 7: PinSpecification( - name="mesh", - type_names=["abstract_meshed_region"], - optional=True, - document="""""", + document="""Speed of sound (if it's not specified, + default value of the speed of sound + in the air is applied).""", ), }, map_output_pin_spec={ @@ -140,7 +138,7 @@ def _spec(): name="fields_container", type_names=["fields_container"], optional=False, - document="""Fieldscontainer filled in""", + document="""""", ), }, ) @@ -160,7 +158,7 @@ def default_config(server=None): Server with channel connected to the remote or local instance. When ``None``, attempts to use the global server. """ - return Operator.default_config(name="mapdl::rst::NS", server=server) + return Operator.default_config(name="erp_radiation_efficiency", server=server) @property def inputs(self): @@ -168,7 +166,7 @@ def inputs(self): Returns -------- - inputs : InputsNodalAveragedStresses + inputs : InputsErpRadiationEfficiency """ return super().inputs @@ -178,190 +176,173 @@ def outputs(self): Returns -------- - outputs : OutputsNodalAveragedStresses + outputs : OutputsErpRadiationEfficiency """ return super().outputs -class InputsNodalAveragedStresses(_Inputs): +class InputsErpRadiationEfficiency(_Inputs): """Intermediate class used to connect user inputs to - nodal_averaged_stresses operator. + erp_radiation_efficiency operator. 
Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_stresses() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> op = dpf.operators.result.erp_radiation_efficiency() >>> my_fields_container = dpf.FieldsContainer() >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) >>> my_mesh = dpf.MeshedRegion() >>> op.inputs.mesh.connect(my_mesh) + >>> my_time_scoping = int() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_mass_density = float() + >>> op.inputs.mass_density.connect(my_mass_density) + >>> my_speed_of_sound = float() + >>> op.inputs.speed_of_sound.connect(my_speed_of_sound) """ def __init__(self, op: Operator): - super().__init__(nodal_averaged_stresses._spec().inputs, op) - self._time_scoping = Input( - nodal_averaged_stresses._spec().input_pin(0), 0, op, -1 - ) - self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( - nodal_averaged_stresses._spec().input_pin(1), 1, op, -1 - ) - self._inputs.append(self._mesh_scoping) + super().__init__(erp_radiation_efficiency._spec().inputs, op) self._fields_container = Input( - nodal_averaged_stresses._spec().input_pin(2), 2, op, -1 + erp_radiation_efficiency._spec().input_pin(0), 0, op, -1 ) self._inputs.append(self._fields_container) - self._streams_container = Input( - nodal_averaged_stresses._spec().input_pin(3), 3, op, -1 + self._mesh = Input(erp_radiation_efficiency._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._mesh) + self._time_scoping = Input( + erp_radiation_efficiency._spec().input_pin(2), 2, op, -1 ) - self._inputs.append(self._streams_container) - self._data_sources = Input( - nodal_averaged_stresses._spec().input_pin(4), 4, op, -1 + self._inputs.append(self._time_scoping) + self._mass_density = Input( + erp_radiation_efficiency._spec().input_pin(3), 3, op, -1 ) - self._inputs.append(self._data_sources) - self._mesh = Input(nodal_averaged_stresses._spec().input_pin(7), 7, op, -1) - self._inputs.append(self._mesh) - - @property - def time_scoping(self): - """Allows to connect time_scoping input to the operator. - - Parameters - ---------- - my_time_scoping : Scoping - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_stresses() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> # or - >>> op.inputs.time_scoping(my_time_scoping) - """ - return self._time_scoping + self._inputs.append(self._mass_density) + self._speed_of_sound = Input( + erp_radiation_efficiency._spec().input_pin(4), 4, op, -1 + ) + self._inputs.append(self._speed_of_sound) @property - def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator. + def fields_container(self): + """Allows to connect fields_container input to the operator. 
Parameters ---------- - my_mesh_scoping : ScopingsContainer or Scoping + my_fields_container : FieldsContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_stresses() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) + >>> op = dpf.operators.result.erp_radiation_efficiency() + >>> op.inputs.fields_container.connect(my_fields_container) >>> # or - >>> op.inputs.mesh_scoping(my_mesh_scoping) + >>> op.inputs.fields_container(my_fields_container) """ - return self._mesh_scoping + return self._fields_container @property - def fields_container(self): - """Allows to connect fields_container input to the operator. + def mesh(self): + """Allows to connect mesh input to the operator. - Fieldscontainer already allocated modified - inplace + The meshes region in this pin has to be + boundary or skin mesh Parameters ---------- - my_fields_container : FieldsContainer + my_mesh : MeshedRegion or MeshesContainer Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_stresses() - >>> op.inputs.fields_container.connect(my_fields_container) + >>> op = dpf.operators.result.erp_radiation_efficiency() + >>> op.inputs.mesh.connect(my_mesh) >>> # or - >>> op.inputs.fields_container(my_fields_container) + >>> op.inputs.mesh(my_mesh) """ - return self._fields_container + return self._mesh @property - def streams_container(self): - """Allows to connect streams_container input to the operator. + def time_scoping(self): + """Allows to connect time_scoping input to the operator. - Streams containing the result file. + Load step number (if it's specified, the erp + is computed only on the substeps of + this step) or time scoping Parameters ---------- - my_streams_container : StreamsContainer or Stream + my_time_scoping : int or Scoping Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_stresses() - >>> op.inputs.streams_container.connect(my_streams_container) + >>> op = dpf.operators.result.erp_radiation_efficiency() + >>> op.inputs.time_scoping.connect(my_time_scoping) >>> # or - >>> op.inputs.streams_container(my_streams_container) + >>> op.inputs.time_scoping(my_time_scoping) """ - return self._streams_container + return self._time_scoping @property - def data_sources(self): - """Allows to connect data_sources input to the operator. + def mass_density(self): + """Allows to connect mass_density input to the operator. - Data sources containing the result file. + Mass density (if it's not specified, default + value of the air is applied). Parameters ---------- - my_data_sources : DataSources + my_mass_density : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_stresses() - >>> op.inputs.data_sources.connect(my_data_sources) + >>> op = dpf.operators.result.erp_radiation_efficiency() + >>> op.inputs.mass_density.connect(my_mass_density) >>> # or - >>> op.inputs.data_sources(my_data_sources) + >>> op.inputs.mass_density(my_mass_density) """ - return self._data_sources + return self._mass_density @property - def mesh(self): - """Allows to connect mesh input to the operator. + def speed_of_sound(self): + """Allows to connect speed_of_sound input to the operator. + + Speed of sound (if it's not specified, + default value of the speed of sound + in the air is applied). 
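For orientation, the renamed operator computes exactly the ratio its summary names: radiation efficiency = enhanced ERP divided by classical ERP. A short usage sketch mirroring the doctests above (my_fields_container and my_skin_mesh remain hypothetical, and the air values are illustrative rather than source defaults):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.erp_radiation_efficiency(
...     fields_container=my_fields_container,  # hypothetical, prepared upstream
...     mesh=my_skin_mesh,  # hypothetical boundary or skin mesh
...     mass_density=1.225,  # illustrative air density
...     speed_of_sound=343.0,  # illustrative speed of sound in air
... )
>>> sigma = op.outputs.fields_container()  # enhanced ERP / classical ERP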
Parameters ---------- - my_mesh : MeshedRegion + my_speed_of_sound : float Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_stresses() - >>> op.inputs.mesh.connect(my_mesh) + >>> op = dpf.operators.result.erp_radiation_efficiency() + >>> op.inputs.speed_of_sound.connect(my_speed_of_sound) >>> # or - >>> op.inputs.mesh(my_mesh) + >>> op.inputs.speed_of_sound(my_speed_of_sound) """ - return self._mesh + return self._speed_of_sound -class OutputsNodalAveragedStresses(_Outputs): +class OutputsErpRadiationEfficiency(_Outputs): """Intermediate class used to get outputs from - nodal_averaged_stresses operator. + erp_radiation_efficiency operator. Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_stresses() + >>> op = dpf.operators.result.erp_radiation_efficiency() >>> # Connect inputs : op.inputs. ... >>> result_fields_container = op.outputs.fields_container() """ def __init__(self, op: Operator): - super().__init__(nodal_averaged_stresses._spec().outputs, op) + super().__init__(erp_radiation_efficiency._spec().outputs, op) self._fields_container = Output( - nodal_averaged_stresses._spec().output_pin(0), 0, op + erp_radiation_efficiency._spec().output_pin(0), 0, op ) self._outputs.append(self._fields_container) @@ -376,7 +357,7 @@ def fields_container(self): Examples -------- >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_stresses() + >>> op = dpf.operators.result.erp_radiation_efficiency() >>> # Connect inputs : op.inputs. ... >>> result_fields_container = op.outputs.fields_container() """ # noqa: E501 diff --git a/ansys/dpf/core/operators/result/euler_load_buckling.py b/ansys/dpf/core/operators/result/euler_load_buckling.py index 68c655a0f41..01edc819fe4 100644 --- a/ansys/dpf/core/operators/result/euler_load_buckling.py +++ b/ansys/dpf/core/operators/result/euler_load_buckling.py @@ -16,7 +16,7 @@ class euler_load_buckling(Operator): Parameters ---------- field_beam_end_condition : DataSources or Field - this pin contains file csv or field of beam's + This pin contains file csv or field of beam's end condition added by the user. if there's no file added, it would take value of all beam's end condition as @@ -93,7 +93,7 @@ def _spec(): name="field_beam_end_condition", type_names=["data_sources", "field"], optional=False, - document="""this pin contains file csv or field of beam's + document="""This pin contains file csv or field of beam's end condition added by the user. if there's no file added, it would take value of all beam's end condition as @@ -224,7 +224,7 @@ def __init__(self, op: Operator): def field_beam_end_condition(self): """Allows to connect field_beam_end_condition input to the operator. - this pin contains file csv or field of beam's + This pin contains file csv or field of beam's end condition added by the user. 
if there's no file added, it would take value of all beam's end condition as diff --git a/ansys/dpf/core/operators/result/heat_flux.py b/ansys/dpf/core/operators/result/heat_flux.py index b3c1a6329bf..23b16a913fb 100644 --- a/ansys/dpf/core/operators/result/heat_flux.py +++ b/ansys/dpf/core/operators/result/heat_flux.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -341,7 +341,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(heat_flux._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(heat_flux._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(heat_flux._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/heat_flux_X.py b/ansys/dpf/core/operators/result/heat_flux_X.py index 9797d06e0b3..66b764f30e3 100644 --- a/ansys/dpf/core/operators/result/heat_flux_X.py +++ b/ansys/dpf/core/operators/result/heat_flux_X.py @@ -233,7 +233,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -339,7 +339,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(heat_flux_X._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(heat_flux_X._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(heat_flux_X._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/heat_flux_Y.py b/ansys/dpf/core/operators/result/heat_flux_Y.py index 7b89046a7c0..735aac53aa5 100644 --- a/ansys/dpf/core/operators/result/heat_flux_Y.py +++ b/ansys/dpf/core/operators/result/heat_flux_Y.py @@ -233,7 +233,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -339,7 +339,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(heat_flux_Y._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(heat_flux_Y._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(heat_flux_Y._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/heat_flux_Z.py b/ansys/dpf/core/operators/result/heat_flux_Z.py index cf984124e3d..970aa388b3b 100644 --- a/ansys/dpf/core/operators/result/heat_flux_Z.py +++ b/ansys/dpf/core/operators/result/heat_flux_Z.py @@ -233,7 +233,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -339,7 +339,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(heat_flux_Z._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(heat_flux_Z._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(heat_flux_Z._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/hydrostatic_pressure.py 
b/ansys/dpf/core/operators/result/hydrostatic_pressure.py index fddef8be285..2c859120319 100644 --- a/ansys/dpf/core/operators/result/hydrostatic_pressure.py +++ b/ansys/dpf/core/operators/result/hydrostatic_pressure.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -351,7 +351,7 @@ def __init__(self, op: Operator): hydrostatic_pressure._spec().input_pin(14), 14, op, -1 ) self._inputs.append(self._read_cyclic) - self._read_beams = Input(hydrostatic_pressure._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(hydrostatic_pressure._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/nodal_averaged_creep_strains.py b/ansys/dpf/core/operators/result/nodal_averaged_creep_strains.py deleted file mode 100644 index 4f9658c218c..00000000000 --- a/ansys/dpf/core/operators/result/nodal_averaged_creep_strains.py +++ /dev/null @@ -1,383 +0,0 @@ -""" -nodal_averaged_creep_strains -============================ -Autogenerated DPF operator classes. -""" -from warnings import warn -from ansys.dpf.core.dpf_operator import Operator -from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs -from ansys.dpf.core.operators.specification import PinSpecification, Specification - - -class nodal_averaged_creep_strains(Operator): - """Read nodal averaged creep strains as averaged nodal result from rst - file. - - Parameters - ---------- - time_scoping : Scoping, optional - mesh_scoping : ScopingsContainer or Scoping, optional - fields_container : FieldsContainer, optional - Fieldscontainer already allocated modified - inplace - streams_container : StreamsContainer or Stream, optional - Streams containing the result file. - data_sources : DataSources - Data sources containing the result file. - mesh : MeshedRegion, optional - - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.nodal_averaged_creep_strains() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.nodal_averaged_creep_strains( - ... time_scoping=my_time_scoping, - ... mesh_scoping=my_mesh_scoping, - ... fields_container=my_fields_container, - ... streams_container=my_streams_container, - ... data_sources=my_data_sources, - ... mesh=my_mesh, - ... 
) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container() - """ - - def __init__( - self, - time_scoping=None, - mesh_scoping=None, - fields_container=None, - streams_container=None, - data_sources=None, - mesh=None, - config=None, - server=None, - ): - super().__init__(name="mapdl::rst::NCR", config=config, server=server) - self._inputs = InputsNodalAveragedCreepStrains(self) - self._outputs = OutputsNodalAveragedCreepStrains(self) - if time_scoping is not None: - self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping is not None: - self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container is not None: - self.inputs.fields_container.connect(fields_container) - if streams_container is not None: - self.inputs.streams_container.connect(streams_container) - if data_sources is not None: - self.inputs.data_sources.connect(data_sources) - if mesh is not None: - self.inputs.mesh.connect(mesh) - - @staticmethod - def _spec(): - description = """Read nodal averaged creep strains as averaged nodal result from rst - file.""" - spec = Specification( - description=description, - map_input_pin_spec={ - 0: PinSpecification( - name="time_scoping", - type_names=["scoping", "vector"], - optional=True, - document="""""", - ), - 1: PinSpecification( - name="mesh_scoping", - type_names=["scopings_container", "scoping", "vector"], - optional=True, - document="""""", - ), - 2: PinSpecification( - name="fields_container", - type_names=["fields_container"], - optional=True, - document="""Fieldscontainer already allocated modified - inplace""", - ), - 3: PinSpecification( - name="streams_container", - type_names=["streams_container", "stream"], - optional=True, - document="""Streams containing the result file.""", - ), - 4: PinSpecification( - name="data_sources", - type_names=["data_sources"], - optional=False, - document="""Data sources containing the result file.""", - ), - 7: PinSpecification( - name="mesh", - type_names=["abstract_meshed_region"], - optional=True, - document="""""", - ), - }, - map_output_pin_spec={ - 0: PinSpecification( - name="fields_container", - type_names=["fields_container"], - optional=False, - document="""Fieldscontainer filled in""", - ), - }, - ) - return spec - - @staticmethod - def default_config(server=None): - """Returns the default config of the operator. - - This config can then be changed to the user needs and be used to - instantiate the operator. The Configuration allows to customize - how the operation will be processed by the operator. - - Parameters - ---------- - server : server.DPFServer, optional - Server with channel connected to the remote or local instance. When - ``None``, attempts to use the global server. - """ - return Operator.default_config(name="mapdl::rst::NCR", server=server) - - @property - def inputs(self): - """Enables to connect inputs to the operator - - Returns - -------- - inputs : InputsNodalAveragedCreepStrains - """ - return super().inputs - - @property - def outputs(self): - """Enables to get outputs of the operator by evaluationg it - - Returns - -------- - outputs : OutputsNodalAveragedCreepStrains - """ - return super().outputs - - -class InputsNodalAveragedCreepStrains(_Inputs): - """Intermediate class used to connect user inputs to - nodal_averaged_creep_strains operator. 
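[Note] On the heat_flux, heat_flux_X/Y/Z, and hydrostatic_pressure hunks earlier in this patch: they only move the optional read_beams pin from 21 to 22, so positional connections must be updated while named-input access keeps working. A short sketch for heat_flux:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.heat_flux()
>>> op.inputs.read_beams.connect(True)  # named input, now routed to pin 22
>>> op.connect(22, True)                # positional equivalent; pin 21 no longer maps to read_beams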
- - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_creep_strains() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - """ - - def __init__(self, op: Operator): - super().__init__(nodal_averaged_creep_strains._spec().inputs, op) - self._time_scoping = Input( - nodal_averaged_creep_strains._spec().input_pin(0), 0, op, -1 - ) - self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( - nodal_averaged_creep_strains._spec().input_pin(1), 1, op, -1 - ) - self._inputs.append(self._mesh_scoping) - self._fields_container = Input( - nodal_averaged_creep_strains._spec().input_pin(2), 2, op, -1 - ) - self._inputs.append(self._fields_container) - self._streams_container = Input( - nodal_averaged_creep_strains._spec().input_pin(3), 3, op, -1 - ) - self._inputs.append(self._streams_container) - self._data_sources = Input( - nodal_averaged_creep_strains._spec().input_pin(4), 4, op, -1 - ) - self._inputs.append(self._data_sources) - self._mesh = Input(nodal_averaged_creep_strains._spec().input_pin(7), 7, op, -1) - self._inputs.append(self._mesh) - - @property - def time_scoping(self): - """Allows to connect time_scoping input to the operator. - - Parameters - ---------- - my_time_scoping : Scoping - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_creep_strains() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> # or - >>> op.inputs.time_scoping(my_time_scoping) - """ - return self._time_scoping - - @property - def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator. - - Parameters - ---------- - my_mesh_scoping : ScopingsContainer or Scoping - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_creep_strains() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> # or - >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ - return self._mesh_scoping - - @property - def fields_container(self): - """Allows to connect fields_container input to the operator. - - Fieldscontainer already allocated modified - inplace - - Parameters - ---------- - my_fields_container : FieldsContainer - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_creep_strains() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> # or - >>> op.inputs.fields_container(my_fields_container) - """ - return self._fields_container - - @property - def streams_container(self): - """Allows to connect streams_container input to the operator. - - Streams containing the result file. 
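[Note] The generated class being deleted here is a thin wrapper around the server-side operator "mapdl::rst::NCR" (see the super().__init__ call above). If a workflow still needs nodal averaged creep strains after this removal, the raw operator can in principle still be reached by name, assuming the connected DPF server continues to expose it; a hedged sketch with a hypothetical my_data_sources:

>>> from ansys.dpf import core as dpf
>>> op = dpf.Operator("mapdl::rst::NCR")               # no generated Inputs/Outputs helpers
>>> op.connect(4, my_data_sources)                     # pin 4: data_sources, the only required pin
>>> fc = op.get_output(0, dpf.types.fields_container)  # output pin 0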
- - Parameters - ---------- - my_streams_container : StreamsContainer or Stream - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_creep_strains() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> # or - >>> op.inputs.streams_container(my_streams_container) - """ - return self._streams_container - - @property - def data_sources(self): - """Allows to connect data_sources input to the operator. - - Data sources containing the result file. - - Parameters - ---------- - my_data_sources : DataSources - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_creep_strains() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> # or - >>> op.inputs.data_sources(my_data_sources) - """ - return self._data_sources - - @property - def mesh(self): - """Allows to connect mesh input to the operator. - - Parameters - ---------- - my_mesh : MeshedRegion - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_creep_strains() - >>> op.inputs.mesh.connect(my_mesh) - >>> # or - >>> op.inputs.mesh(my_mesh) - """ - return self._mesh - - -class OutputsNodalAveragedCreepStrains(_Outputs): - """Intermediate class used to get outputs from - nodal_averaged_creep_strains operator. - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_creep_strains() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ - - def __init__(self, op: Operator): - super().__init__(nodal_averaged_creep_strains._spec().outputs, op) - self._fields_container = Output( - nodal_averaged_creep_strains._spec().output_pin(0), 0, op - ) - self._outputs.append(self._fields_container) - - @property - def fields_container(self): - """Allows to get fields_container output of the operator - - Returns - ---------- - my_fields_container : FieldsContainer - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_creep_strains() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ # noqa: E501 - return self._fields_container diff --git a/ansys/dpf/core/operators/result/nodal_averaged_elastic_strains.py b/ansys/dpf/core/operators/result/nodal_averaged_elastic_strains.py deleted file mode 100644 index c072e353f81..00000000000 --- a/ansys/dpf/core/operators/result/nodal_averaged_elastic_strains.py +++ /dev/null @@ -1,385 +0,0 @@ -""" -nodal_averaged_elastic_strains -============================== -Autogenerated DPF operator classes. -""" -from warnings import warn -from ansys.dpf.core.dpf_operator import Operator -from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs -from ansys.dpf.core.operators.specification import PinSpecification, Specification - - -class nodal_averaged_elastic_strains(Operator): - """Read nodal averaged elastic strains as averaged nodal result from rst - file. - - Parameters - ---------- - time_scoping : Scoping, optional - mesh_scoping : ScopingsContainer or Scoping, optional - fields_container : FieldsContainer, optional - Fieldscontainer already allocated modified - inplace - streams_container : StreamsContainer or Stream, optional - Streams containing the result file. - data_sources : DataSources - Data sources containing the result file. 
- mesh : MeshedRegion, optional - - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.nodal_averaged_elastic_strains() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.nodal_averaged_elastic_strains( - ... time_scoping=my_time_scoping, - ... mesh_scoping=my_mesh_scoping, - ... fields_container=my_fields_container, - ... streams_container=my_streams_container, - ... data_sources=my_data_sources, - ... mesh=my_mesh, - ... ) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container() - """ - - def __init__( - self, - time_scoping=None, - mesh_scoping=None, - fields_container=None, - streams_container=None, - data_sources=None, - mesh=None, - config=None, - server=None, - ): - super().__init__(name="mapdl::rst::NPEL", config=config, server=server) - self._inputs = InputsNodalAveragedElasticStrains(self) - self._outputs = OutputsNodalAveragedElasticStrains(self) - if time_scoping is not None: - self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping is not None: - self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container is not None: - self.inputs.fields_container.connect(fields_container) - if streams_container is not None: - self.inputs.streams_container.connect(streams_container) - if data_sources is not None: - self.inputs.data_sources.connect(data_sources) - if mesh is not None: - self.inputs.mesh.connect(mesh) - - @staticmethod - def _spec(): - description = """Read nodal averaged elastic strains as averaged nodal result from rst - file.""" - spec = Specification( - description=description, - map_input_pin_spec={ - 0: PinSpecification( - name="time_scoping", - type_names=["scoping", "vector"], - optional=True, - document="""""", - ), - 1: PinSpecification( - name="mesh_scoping", - type_names=["scopings_container", "scoping", "vector"], - optional=True, - document="""""", - ), - 2: PinSpecification( - name="fields_container", - type_names=["fields_container"], - optional=True, - document="""Fieldscontainer already allocated modified - inplace""", - ), - 3: PinSpecification( - name="streams_container", - type_names=["streams_container", "stream"], - optional=True, - document="""Streams containing the result file.""", - ), - 4: PinSpecification( - name="data_sources", - type_names=["data_sources"], - optional=False, - document="""Data sources containing the result file.""", - ), - 7: PinSpecification( - name="mesh", - type_names=["abstract_meshed_region"], - optional=True, - document="""""", - ), - }, - map_output_pin_spec={ - 0: PinSpecification( - name="fields_container", - type_names=["fields_container"], - optional=False, - document="""Fieldscontainer filled in""", - ), - }, - ) - return spec - - @staticmethod - def default_config(server=None): - """Returns the default config of the operator. 
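[Note] The default_config staticmethod deleted above illustrates a pattern that survives for every operator this patch keeps: fetch the default configuration by operator name, adjust it as needed, and pass it back at instantiation. A sketch assuming heat_flux carries the same generated staticmethod (it follows the same template):

>>> from ansys.dpf import core as dpf
>>> config = dpf.operators.result.heat_flux.default_config()
>>> op = dpf.operators.result.heat_flux(config=config)  # config may be customized before this call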
- - This config can then be changed to the user needs and be used to - instantiate the operator. The Configuration allows to customize - how the operation will be processed by the operator. - - Parameters - ---------- - server : server.DPFServer, optional - Server with channel connected to the remote or local instance. When - ``None``, attempts to use the global server. - """ - return Operator.default_config(name="mapdl::rst::NPEL", server=server) - - @property - def inputs(self): - """Enables to connect inputs to the operator - - Returns - -------- - inputs : InputsNodalAveragedElasticStrains - """ - return super().inputs - - @property - def outputs(self): - """Enables to get outputs of the operator by evaluationg it - - Returns - -------- - outputs : OutputsNodalAveragedElasticStrains - """ - return super().outputs - - -class InputsNodalAveragedElasticStrains(_Inputs): - """Intermediate class used to connect user inputs to - nodal_averaged_elastic_strains operator. - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_elastic_strains() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - """ - - def __init__(self, op: Operator): - super().__init__(nodal_averaged_elastic_strains._spec().inputs, op) - self._time_scoping = Input( - nodal_averaged_elastic_strains._spec().input_pin(0), 0, op, -1 - ) - self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( - nodal_averaged_elastic_strains._spec().input_pin(1), 1, op, -1 - ) - self._inputs.append(self._mesh_scoping) - self._fields_container = Input( - nodal_averaged_elastic_strains._spec().input_pin(2), 2, op, -1 - ) - self._inputs.append(self._fields_container) - self._streams_container = Input( - nodal_averaged_elastic_strains._spec().input_pin(3), 3, op, -1 - ) - self._inputs.append(self._streams_container) - self._data_sources = Input( - nodal_averaged_elastic_strains._spec().input_pin(4), 4, op, -1 - ) - self._inputs.append(self._data_sources) - self._mesh = Input( - nodal_averaged_elastic_strains._spec().input_pin(7), 7, op, -1 - ) - self._inputs.append(self._mesh) - - @property - def time_scoping(self): - """Allows to connect time_scoping input to the operator. - - Parameters - ---------- - my_time_scoping : Scoping - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_elastic_strains() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> # or - >>> op.inputs.time_scoping(my_time_scoping) - """ - return self._time_scoping - - @property - def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator. 
- - Parameters - ---------- - my_mesh_scoping : ScopingsContainer or Scoping - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_elastic_strains() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> # or - >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ - return self._mesh_scoping - - @property - def fields_container(self): - """Allows to connect fields_container input to the operator. - - Fieldscontainer already allocated modified - inplace - - Parameters - ---------- - my_fields_container : FieldsContainer - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_elastic_strains() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> # or - >>> op.inputs.fields_container(my_fields_container) - """ - return self._fields_container - - @property - def streams_container(self): - """Allows to connect streams_container input to the operator. - - Streams containing the result file. - - Parameters - ---------- - my_streams_container : StreamsContainer or Stream - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_elastic_strains() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> # or - >>> op.inputs.streams_container(my_streams_container) - """ - return self._streams_container - - @property - def data_sources(self): - """Allows to connect data_sources input to the operator. - - Data sources containing the result file. - - Parameters - ---------- - my_data_sources : DataSources - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_elastic_strains() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> # or - >>> op.inputs.data_sources(my_data_sources) - """ - return self._data_sources - - @property - def mesh(self): - """Allows to connect mesh input to the operator. - - Parameters - ---------- - my_mesh : MeshedRegion - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_elastic_strains() - >>> op.inputs.mesh.connect(my_mesh) - >>> # or - >>> op.inputs.mesh(my_mesh) - """ - return self._mesh - - -class OutputsNodalAveragedElasticStrains(_Outputs): - """Intermediate class used to get outputs from - nodal_averaged_elastic_strains operator. - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_elastic_strains() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ - - def __init__(self, op: Operator): - super().__init__(nodal_averaged_elastic_strains._spec().outputs, op) - self._fields_container = Output( - nodal_averaged_elastic_strains._spec().output_pin(0), 0, op - ) - self._outputs.append(self._fields_container) - - @property - def fields_container(self): - """Allows to get fields_container output of the operator - - Returns - ---------- - my_fields_container : FieldsContainer - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_elastic_strains() - >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ # noqa: E501 - return self._fields_container diff --git a/ansys/dpf/core/operators/result/nodal_averaged_equivalent_creep_strain.py b/ansys/dpf/core/operators/result/nodal_averaged_equivalent_creep_strain.py deleted file mode 100644 index 6dc8bc68318..00000000000 --- a/ansys/dpf/core/operators/result/nodal_averaged_equivalent_creep_strain.py +++ /dev/null @@ -1,385 +0,0 @@ -""" -nodal_averaged_equivalent_creep_strain -====================================== -Autogenerated DPF operator classes. -""" -from warnings import warn -from ansys.dpf.core.dpf_operator import Operator -from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs -from ansys.dpf.core.operators.specification import PinSpecification, Specification - - -class nodal_averaged_equivalent_creep_strain(Operator): - """Read nodal averaged equivalent creep strain as averaged nodal result - from rst file. - - Parameters - ---------- - time_scoping : Scoping, optional - mesh_scoping : ScopingsContainer or Scoping, optional - fields_container : FieldsContainer, optional - Fieldscontainer already allocated modified - inplace - streams_container : StreamsContainer or Stream, optional - Streams containing the result file. - data_sources : DataSources - Data sources containing the result file. - mesh : MeshedRegion, optional - - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain( - ... time_scoping=my_time_scoping, - ... mesh_scoping=my_mesh_scoping, - ... fields_container=my_fields_container, - ... streams_container=my_streams_container, - ... data_sources=my_data_sources, - ... mesh=my_mesh, - ... 
) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container() - """ - - def __init__( - self, - time_scoping=None, - mesh_scoping=None, - fields_container=None, - streams_container=None, - data_sources=None, - mesh=None, - config=None, - server=None, - ): - super().__init__(name="mapdl::rst::NCR_EQV", config=config, server=server) - self._inputs = InputsNodalAveragedEquivalentCreepStrain(self) - self._outputs = OutputsNodalAveragedEquivalentCreepStrain(self) - if time_scoping is not None: - self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping is not None: - self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container is not None: - self.inputs.fields_container.connect(fields_container) - if streams_container is not None: - self.inputs.streams_container.connect(streams_container) - if data_sources is not None: - self.inputs.data_sources.connect(data_sources) - if mesh is not None: - self.inputs.mesh.connect(mesh) - - @staticmethod - def _spec(): - description = """Read nodal averaged equivalent creep strain as averaged nodal result - from rst file.""" - spec = Specification( - description=description, - map_input_pin_spec={ - 0: PinSpecification( - name="time_scoping", - type_names=["scoping", "vector"], - optional=True, - document="""""", - ), - 1: PinSpecification( - name="mesh_scoping", - type_names=["scopings_container", "scoping", "vector"], - optional=True, - document="""""", - ), - 2: PinSpecification( - name="fields_container", - type_names=["fields_container"], - optional=True, - document="""Fieldscontainer already allocated modified - inplace""", - ), - 3: PinSpecification( - name="streams_container", - type_names=["streams_container", "stream"], - optional=True, - document="""Streams containing the result file.""", - ), - 4: PinSpecification( - name="data_sources", - type_names=["data_sources"], - optional=False, - document="""Data sources containing the result file.""", - ), - 7: PinSpecification( - name="mesh", - type_names=["abstract_meshed_region"], - optional=True, - document="""""", - ), - }, - map_output_pin_spec={ - 0: PinSpecification( - name="fields_container", - type_names=["fields_container"], - optional=False, - document="""Fieldscontainer filled in""", - ), - }, - ) - return spec - - @staticmethod - def default_config(server=None): - """Returns the default config of the operator. - - This config can then be changed to the user needs and be used to - instantiate the operator. The Configuration allows to customize - how the operation will be processed by the operator. - - Parameters - ---------- - server : server.DPFServer, optional - Server with channel connected to the remote or local instance. When - ``None``, attempts to use the global server. - """ - return Operator.default_config(name="mapdl::rst::NCR_EQV", server=server) - - @property - def inputs(self): - """Enables to connect inputs to the operator - - Returns - -------- - inputs : InputsNodalAveragedEquivalentCreepStrain - """ - return super().inputs - - @property - def outputs(self): - """Enables to get outputs of the operator by evaluationg it - - Returns - -------- - outputs : OutputsNodalAveragedEquivalentCreepStrain - """ - return super().outputs - - -class InputsNodalAveragedEquivalentCreepStrain(_Inputs): - """Intermediate class used to connect user inputs to - nodal_averaged_equivalent_creep_strain operator. 
- - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - """ - - def __init__(self, op: Operator): - super().__init__(nodal_averaged_equivalent_creep_strain._spec().inputs, op) - self._time_scoping = Input( - nodal_averaged_equivalent_creep_strain._spec().input_pin(0), 0, op, -1 - ) - self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( - nodal_averaged_equivalent_creep_strain._spec().input_pin(1), 1, op, -1 - ) - self._inputs.append(self._mesh_scoping) - self._fields_container = Input( - nodal_averaged_equivalent_creep_strain._spec().input_pin(2), 2, op, -1 - ) - self._inputs.append(self._fields_container) - self._streams_container = Input( - nodal_averaged_equivalent_creep_strain._spec().input_pin(3), 3, op, -1 - ) - self._inputs.append(self._streams_container) - self._data_sources = Input( - nodal_averaged_equivalent_creep_strain._spec().input_pin(4), 4, op, -1 - ) - self._inputs.append(self._data_sources) - self._mesh = Input( - nodal_averaged_equivalent_creep_strain._spec().input_pin(7), 7, op, -1 - ) - self._inputs.append(self._mesh) - - @property - def time_scoping(self): - """Allows to connect time_scoping input to the operator. - - Parameters - ---------- - my_time_scoping : Scoping - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> # or - >>> op.inputs.time_scoping(my_time_scoping) - """ - return self._time_scoping - - @property - def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator. - - Parameters - ---------- - my_mesh_scoping : ScopingsContainer or Scoping - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> # or - >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ - return self._mesh_scoping - - @property - def fields_container(self): - """Allows to connect fields_container input to the operator. - - Fieldscontainer already allocated modified - inplace - - Parameters - ---------- - my_fields_container : FieldsContainer - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> # or - >>> op.inputs.fields_container(my_fields_container) - """ - return self._fields_container - - @property - def streams_container(self): - """Allows to connect streams_container input to the operator. - - Streams containing the result file. 
- - Parameters - ---------- - my_streams_container : StreamsContainer or Stream - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> # or - >>> op.inputs.streams_container(my_streams_container) - """ - return self._streams_container - - @property - def data_sources(self): - """Allows to connect data_sources input to the operator. - - Data sources containing the result file. - - Parameters - ---------- - my_data_sources : DataSources - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> # or - >>> op.inputs.data_sources(my_data_sources) - """ - return self._data_sources - - @property - def mesh(self): - """Allows to connect mesh input to the operator. - - Parameters - ---------- - my_mesh : MeshedRegion - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain() - >>> op.inputs.mesh.connect(my_mesh) - >>> # or - >>> op.inputs.mesh(my_mesh) - """ - return self._mesh - - -class OutputsNodalAveragedEquivalentCreepStrain(_Outputs): - """Intermediate class used to get outputs from - nodal_averaged_equivalent_creep_strain operator. - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ - - def __init__(self, op: Operator): - super().__init__(nodal_averaged_equivalent_creep_strain._spec().outputs, op) - self._fields_container = Output( - nodal_averaged_equivalent_creep_strain._spec().output_pin(0), 0, op - ) - self._outputs.append(self._fields_container) - - @property - def fields_container(self): - """Allows to get fields_container output of the operator - - Returns - ---------- - my_fields_container : FieldsContainer - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_creep_strain() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ # noqa: E501 - return self._fields_container diff --git a/ansys/dpf/core/operators/result/nodal_averaged_equivalent_elastic_strain.py b/ansys/dpf/core/operators/result/nodal_averaged_equivalent_elastic_strain.py deleted file mode 100644 index 69408960644..00000000000 --- a/ansys/dpf/core/operators/result/nodal_averaged_equivalent_elastic_strain.py +++ /dev/null @@ -1,385 +0,0 @@ -""" -nodal_averaged_equivalent_elastic_strain -======================================== -Autogenerated DPF operator classes. -""" -from warnings import warn -from ansys.dpf.core.dpf_operator import Operator -from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs -from ansys.dpf.core.operators.specification import PinSpecification, Specification - - -class nodal_averaged_equivalent_elastic_strain(Operator): - """Read nodal averaged equivalent elastic strain as averaged nodal result - from rst file. 
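[Note] The pin maps in these deleted _spec() methods (0 time_scoping, 1 mesh_scoping, 2 fields_container, 3 streams_container, 4 data_sources, 7 mesh) are what made three call styles interchangeable in the generated classes. A sketch of that equivalence on the still-available heat_flux operator, assuming its spec uses the same conventional pin 4 for data_sources (my_data_sources is hypothetical):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.heat_flux(data_sources=my_data_sources)  # constructor keyword
>>> op.inputs.data_sources.connect(my_data_sources)                    # named input
>>> op.connect(4, my_data_sources)                                     # raw pin number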
- - Parameters - ---------- - time_scoping : Scoping, optional - mesh_scoping : ScopingsContainer or Scoping, optional - fields_container : FieldsContainer, optional - Fieldscontainer already allocated modified - inplace - streams_container : StreamsContainer or Stream, optional - Streams containing the result file. - data_sources : DataSources - Data sources containing the result file. - mesh : MeshedRegion, optional - - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain( - ... time_scoping=my_time_scoping, - ... mesh_scoping=my_mesh_scoping, - ... fields_container=my_fields_container, - ... streams_container=my_streams_container, - ... data_sources=my_data_sources, - ... mesh=my_mesh, - ... ) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container() - """ - - def __init__( - self, - time_scoping=None, - mesh_scoping=None, - fields_container=None, - streams_container=None, - data_sources=None, - mesh=None, - config=None, - server=None, - ): - super().__init__(name="mapdl::rst::NPEL_EQV", config=config, server=server) - self._inputs = InputsNodalAveragedEquivalentElasticStrain(self) - self._outputs = OutputsNodalAveragedEquivalentElasticStrain(self) - if time_scoping is not None: - self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping is not None: - self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container is not None: - self.inputs.fields_container.connect(fields_container) - if streams_container is not None: - self.inputs.streams_container.connect(streams_container) - if data_sources is not None: - self.inputs.data_sources.connect(data_sources) - if mesh is not None: - self.inputs.mesh.connect(mesh) - - @staticmethod - def _spec(): - description = """Read nodal averaged equivalent elastic strain as averaged nodal result - from rst file.""" - spec = Specification( - description=description, - map_input_pin_spec={ - 0: PinSpecification( - name="time_scoping", - type_names=["scoping", "vector"], - optional=True, - document="""""", - ), - 1: PinSpecification( - name="mesh_scoping", - type_names=["scopings_container", "scoping", "vector"], - optional=True, - document="""""", - ), - 2: PinSpecification( - name="fields_container", - type_names=["fields_container"], - optional=True, - document="""Fieldscontainer already allocated modified - inplace""", - ), - 3: PinSpecification( - name="streams_container", - type_names=["streams_container", "stream"], - optional=True, - document="""Streams containing the result file.""", - ), - 4: PinSpecification( - name="data_sources", - type_names=["data_sources"], - optional=False, - document="""Data sources containing the result file.""", - ), - 
7: PinSpecification( - name="mesh", - type_names=["abstract_meshed_region"], - optional=True, - document="""""", - ), - }, - map_output_pin_spec={ - 0: PinSpecification( - name="fields_container", - type_names=["fields_container"], - optional=False, - document="""Fieldscontainer filled in""", - ), - }, - ) - return spec - - @staticmethod - def default_config(server=None): - """Returns the default config of the operator. - - This config can then be changed to the user needs and be used to - instantiate the operator. The Configuration allows to customize - how the operation will be processed by the operator. - - Parameters - ---------- - server : server.DPFServer, optional - Server with channel connected to the remote or local instance. When - ``None``, attempts to use the global server. - """ - return Operator.default_config(name="mapdl::rst::NPEL_EQV", server=server) - - @property - def inputs(self): - """Enables to connect inputs to the operator - - Returns - -------- - inputs : InputsNodalAveragedEquivalentElasticStrain - """ - return super().inputs - - @property - def outputs(self): - """Enables to get outputs of the operator by evaluationg it - - Returns - -------- - outputs : OutputsNodalAveragedEquivalentElasticStrain - """ - return super().outputs - - -class InputsNodalAveragedEquivalentElasticStrain(_Inputs): - """Intermediate class used to connect user inputs to - nodal_averaged_equivalent_elastic_strain operator. - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - """ - - def __init__(self, op: Operator): - super().__init__(nodal_averaged_equivalent_elastic_strain._spec().inputs, op) - self._time_scoping = Input( - nodal_averaged_equivalent_elastic_strain._spec().input_pin(0), 0, op, -1 - ) - self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( - nodal_averaged_equivalent_elastic_strain._spec().input_pin(1), 1, op, -1 - ) - self._inputs.append(self._mesh_scoping) - self._fields_container = Input( - nodal_averaged_equivalent_elastic_strain._spec().input_pin(2), 2, op, -1 - ) - self._inputs.append(self._fields_container) - self._streams_container = Input( - nodal_averaged_equivalent_elastic_strain._spec().input_pin(3), 3, op, -1 - ) - self._inputs.append(self._streams_container) - self._data_sources = Input( - nodal_averaged_equivalent_elastic_strain._spec().input_pin(4), 4, op, -1 - ) - self._inputs.append(self._data_sources) - self._mesh = Input( - nodal_averaged_equivalent_elastic_strain._spec().input_pin(7), 7, op, -1 - ) - self._inputs.append(self._mesh) - - @property - def time_scoping(self): - """Allows to connect time_scoping input to the operator. 
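[Note] Per the specs above, time_scoping pins accept either a Scoping or a plain vector of time step ids. For any result operator that keeps this pin (heat_flux, for example), a Python list is the usual way to pass the vector form; a small sketch:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.heat_flux()
>>> op.inputs.time_scoping.connect([1, 2])  # vector form: list of time step ids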
- - Parameters - ---------- - my_time_scoping : Scoping - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> # or - >>> op.inputs.time_scoping(my_time_scoping) - """ - return self._time_scoping - - @property - def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator. - - Parameters - ---------- - my_mesh_scoping : ScopingsContainer or Scoping - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> # or - >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ - return self._mesh_scoping - - @property - def fields_container(self): - """Allows to connect fields_container input to the operator. - - Fieldscontainer already allocated modified - inplace - - Parameters - ---------- - my_fields_container : FieldsContainer - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> # or - >>> op.inputs.fields_container(my_fields_container) - """ - return self._fields_container - - @property - def streams_container(self): - """Allows to connect streams_container input to the operator. - - Streams containing the result file. - - Parameters - ---------- - my_streams_container : StreamsContainer or Stream - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> # or - >>> op.inputs.streams_container(my_streams_container) - """ - return self._streams_container - - @property - def data_sources(self): - """Allows to connect data_sources input to the operator. - - Data sources containing the result file. - - Parameters - ---------- - my_data_sources : DataSources - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> # or - >>> op.inputs.data_sources(my_data_sources) - """ - return self._data_sources - - @property - def mesh(self): - """Allows to connect mesh input to the operator. - - Parameters - ---------- - my_mesh : MeshedRegion - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain() - >>> op.inputs.mesh.connect(my_mesh) - >>> # or - >>> op.inputs.mesh(my_mesh) - """ - return self._mesh - - -class OutputsNodalAveragedEquivalentElasticStrain(_Outputs): - """Intermediate class used to get outputs from - nodal_averaged_equivalent_elastic_strain operator. - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain() - >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ - - def __init__(self, op: Operator): - super().__init__(nodal_averaged_equivalent_elastic_strain._spec().outputs, op) - self._fields_container = Output( - nodal_averaged_equivalent_elastic_strain._spec().output_pin(0), 0, op - ) - self._outputs.append(self._fields_container) - - @property - def fields_container(self): - """Allows to get fields_container output of the operator - - Returns - ---------- - my_fields_container : FieldsContainer - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_elastic_strain() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ # noqa: E501 - return self._fields_container diff --git a/ansys/dpf/core/operators/result/nodal_averaged_equivalent_plastic_strain.py b/ansys/dpf/core/operators/result/nodal_averaged_equivalent_plastic_strain.py deleted file mode 100644 index f3557236eec..00000000000 --- a/ansys/dpf/core/operators/result/nodal_averaged_equivalent_plastic_strain.py +++ /dev/null @@ -1,385 +0,0 @@ -""" -nodal_averaged_equivalent_plastic_strain -======================================== -Autogenerated DPF operator classes. -""" -from warnings import warn -from ansys.dpf.core.dpf_operator import Operator -from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs -from ansys.dpf.core.operators.specification import PinSpecification, Specification - - -class nodal_averaged_equivalent_plastic_strain(Operator): - """Read nodal averaged equivalent plastic strain as averaged nodal result - from rst file. - - Parameters - ---------- - time_scoping : Scoping, optional - mesh_scoping : ScopingsContainer or Scoping, optional - fields_container : FieldsContainer, optional - Fieldscontainer already allocated modified - inplace - streams_container : StreamsContainer or Stream, optional - Streams containing the result file. - data_sources : DataSources - Data sources containing the result file. - mesh : MeshedRegion, optional - - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain( - ... time_scoping=my_time_scoping, - ... mesh_scoping=my_mesh_scoping, - ... fields_container=my_fields_container, - ... streams_container=my_streams_container, - ... data_sources=my_data_sources, - ... mesh=my_mesh, - ... 
) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container() - """ - - def __init__( - self, - time_scoping=None, - mesh_scoping=None, - fields_container=None, - streams_container=None, - data_sources=None, - mesh=None, - config=None, - server=None, - ): - super().__init__(name="mapdl::rst::NPPL_EQV", config=config, server=server) - self._inputs = InputsNodalAveragedEquivalentPlasticStrain(self) - self._outputs = OutputsNodalAveragedEquivalentPlasticStrain(self) - if time_scoping is not None: - self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping is not None: - self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container is not None: - self.inputs.fields_container.connect(fields_container) - if streams_container is not None: - self.inputs.streams_container.connect(streams_container) - if data_sources is not None: - self.inputs.data_sources.connect(data_sources) - if mesh is not None: - self.inputs.mesh.connect(mesh) - - @staticmethod - def _spec(): - description = """Read nodal averaged equivalent plastic strain as averaged nodal result - from rst file.""" - spec = Specification( - description=description, - map_input_pin_spec={ - 0: PinSpecification( - name="time_scoping", - type_names=["scoping", "vector"], - optional=True, - document="""""", - ), - 1: PinSpecification( - name="mesh_scoping", - type_names=["scopings_container", "scoping", "vector"], - optional=True, - document="""""", - ), - 2: PinSpecification( - name="fields_container", - type_names=["fields_container"], - optional=True, - document="""Fieldscontainer already allocated modified - inplace""", - ), - 3: PinSpecification( - name="streams_container", - type_names=["streams_container", "stream"], - optional=True, - document="""Streams containing the result file.""", - ), - 4: PinSpecification( - name="data_sources", - type_names=["data_sources"], - optional=False, - document="""Data sources containing the result file.""", - ), - 7: PinSpecification( - name="mesh", - type_names=["abstract_meshed_region"], - optional=True, - document="""""", - ), - }, - map_output_pin_spec={ - 0: PinSpecification( - name="fields_container", - type_names=["fields_container"], - optional=False, - document="""Fieldscontainer filled in""", - ), - }, - ) - return spec - - @staticmethod - def default_config(server=None): - """Returns the default config of the operator. - - This config can then be changed to the user needs and be used to - instantiate the operator. The Configuration allows to customize - how the operation will be processed by the operator. - - Parameters - ---------- - server : server.DPFServer, optional - Server with channel connected to the remote or local instance. When - ``None``, attempts to use the global server. - """ - return Operator.default_config(name="mapdl::rst::NPPL_EQV", server=server) - - @property - def inputs(self): - """Enables to connect inputs to the operator - - Returns - -------- - inputs : InputsNodalAveragedEquivalentPlasticStrain - """ - return super().inputs - - @property - def outputs(self): - """Enables to get outputs of the operator by evaluationg it - - Returns - -------- - outputs : OutputsNodalAveragedEquivalentPlasticStrain - """ - return super().outputs - - -class InputsNodalAveragedEquivalentPlasticStrain(_Inputs): - """Intermediate class used to connect user inputs to - nodal_averaged_equivalent_plastic_strain operator. 
- - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - """ - - def __init__(self, op: Operator): - super().__init__(nodal_averaged_equivalent_plastic_strain._spec().inputs, op) - self._time_scoping = Input( - nodal_averaged_equivalent_plastic_strain._spec().input_pin(0), 0, op, -1 - ) - self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( - nodal_averaged_equivalent_plastic_strain._spec().input_pin(1), 1, op, -1 - ) - self._inputs.append(self._mesh_scoping) - self._fields_container = Input( - nodal_averaged_equivalent_plastic_strain._spec().input_pin(2), 2, op, -1 - ) - self._inputs.append(self._fields_container) - self._streams_container = Input( - nodal_averaged_equivalent_plastic_strain._spec().input_pin(3), 3, op, -1 - ) - self._inputs.append(self._streams_container) - self._data_sources = Input( - nodal_averaged_equivalent_plastic_strain._spec().input_pin(4), 4, op, -1 - ) - self._inputs.append(self._data_sources) - self._mesh = Input( - nodal_averaged_equivalent_plastic_strain._spec().input_pin(7), 7, op, -1 - ) - self._inputs.append(self._mesh) - - @property - def time_scoping(self): - """Allows to connect time_scoping input to the operator. - - Parameters - ---------- - my_time_scoping : Scoping - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> # or - >>> op.inputs.time_scoping(my_time_scoping) - """ - return self._time_scoping - - @property - def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator. - - Parameters - ---------- - my_mesh_scoping : ScopingsContainer or Scoping - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> # or - >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ - return self._mesh_scoping - - @property - def fields_container(self): - """Allows to connect fields_container input to the operator. - - Fieldscontainer already allocated modified - inplace - - Parameters - ---------- - my_fields_container : FieldsContainer - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> # or - >>> op.inputs.fields_container(my_fields_container) - """ - return self._fields_container - - @property - def streams_container(self): - """Allows to connect streams_container input to the operator. - - Streams containing the result file. 
- - Parameters - ---------- - my_streams_container : StreamsContainer or Stream - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> # or - >>> op.inputs.streams_container(my_streams_container) - """ - return self._streams_container - - @property - def data_sources(self): - """Allows to connect data_sources input to the operator. - - Data sources containing the result file. - - Parameters - ---------- - my_data_sources : DataSources - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> # or - >>> op.inputs.data_sources(my_data_sources) - """ - return self._data_sources - - @property - def mesh(self): - """Allows to connect mesh input to the operator. - - Parameters - ---------- - my_mesh : MeshedRegion - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain() - >>> op.inputs.mesh.connect(my_mesh) - >>> # or - >>> op.inputs.mesh(my_mesh) - """ - return self._mesh - - -class OutputsNodalAveragedEquivalentPlasticStrain(_Outputs): - """Intermediate class used to get outputs from - nodal_averaged_equivalent_plastic_strain operator. - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ - - def __init__(self, op: Operator): - super().__init__(nodal_averaged_equivalent_plastic_strain._spec().outputs, op) - self._fields_container = Output( - nodal_averaged_equivalent_plastic_strain._spec().output_pin(0), 0, op - ) - self._outputs.append(self._fields_container) - - @property - def fields_container(self): - """Allows to get fields_container output of the operator - - Returns - ---------- - my_fields_container : FieldsContainer - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_plastic_strain() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ # noqa: E501 - return self._fields_container diff --git a/ansys/dpf/core/operators/result/nodal_averaged_equivalent_thermal_strains.py b/ansys/dpf/core/operators/result/nodal_averaged_equivalent_thermal_strains.py deleted file mode 100644 index c08a3763d76..00000000000 --- a/ansys/dpf/core/operators/result/nodal_averaged_equivalent_thermal_strains.py +++ /dev/null @@ -1,385 +0,0 @@ -""" -nodal_averaged_equivalent_thermal_strains -========================================= -Autogenerated DPF operator classes. -""" -from warnings import warn -from ansys.dpf.core.dpf_operator import Operator -from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs -from ansys.dpf.core.operators.specification import PinSpecification, Specification - - -class nodal_averaged_equivalent_thermal_strains(Operator): - """Read nodal averaged equivalent thermal strains as averaged nodal - result from rst file. 
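In the parameter list that follows, data_sources is the only required input; every other pin is optional. A DataSources object is typically built straight from a result file path (a sketch; the path is a placeholder):

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources()
>>> ds.set_result_file_path(r"path/to/file.rst")  # placeholder path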
- - Parameters - ---------- - time_scoping : Scoping, optional - mesh_scoping : ScopingsContainer or Scoping, optional - fields_container : FieldsContainer, optional - Fieldscontainer already allocated modified - inplace - streams_container : StreamsContainer or Stream, optional - Streams containing the result file. - data_sources : DataSources - Data sources containing the result file. - mesh : MeshedRegion, optional - - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains( - ... time_scoping=my_time_scoping, - ... mesh_scoping=my_mesh_scoping, - ... fields_container=my_fields_container, - ... streams_container=my_streams_container, - ... data_sources=my_data_sources, - ... mesh=my_mesh, - ... ) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container() - """ - - def __init__( - self, - time_scoping=None, - mesh_scoping=None, - fields_container=None, - streams_container=None, - data_sources=None, - mesh=None, - config=None, - server=None, - ): - super().__init__(name="mapdl::rst::NTH_EQV", config=config, server=server) - self._inputs = InputsNodalAveragedEquivalentThermalStrains(self) - self._outputs = OutputsNodalAveragedEquivalentThermalStrains(self) - if time_scoping is not None: - self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping is not None: - self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container is not None: - self.inputs.fields_container.connect(fields_container) - if streams_container is not None: - self.inputs.streams_container.connect(streams_container) - if data_sources is not None: - self.inputs.data_sources.connect(data_sources) - if mesh is not None: - self.inputs.mesh.connect(mesh) - - @staticmethod - def _spec(): - description = """Read nodal averaged equivalent thermal strains as averaged nodal - result from rst file.""" - spec = Specification( - description=description, - map_input_pin_spec={ - 0: PinSpecification( - name="time_scoping", - type_names=["scoping", "vector"], - optional=True, - document="""""", - ), - 1: PinSpecification( - name="mesh_scoping", - type_names=["scopings_container", "scoping", "vector"], - optional=True, - document="""""", - ), - 2: PinSpecification( - name="fields_container", - type_names=["fields_container"], - optional=True, - document="""Fieldscontainer already allocated modified - inplace""", - ), - 3: PinSpecification( - name="streams_container", - type_names=["streams_container", "stream"], - optional=True, - document="""Streams containing the result file.""", - ), - 4: PinSpecification( - name="data_sources", - type_names=["data_sources"], - optional=False, - document="""Data sources containing the result file.""", - 
), - 7: PinSpecification( - name="mesh", - type_names=["abstract_meshed_region"], - optional=True, - document="""""", - ), - }, - map_output_pin_spec={ - 0: PinSpecification( - name="fields_container", - type_names=["fields_container"], - optional=False, - document="""Fieldscontainer filled in""", - ), - }, - ) - return spec - - @staticmethod - def default_config(server=None): - """Returns the default config of the operator. - - This config can then be changed to the user needs and be used to - instantiate the operator. The Configuration allows to customize - how the operation will be processed by the operator. - - Parameters - ---------- - server : server.DPFServer, optional - Server with channel connected to the remote or local instance. When - ``None``, attempts to use the global server. - """ - return Operator.default_config(name="mapdl::rst::NTH_EQV", server=server) - - @property - def inputs(self): - """Enables to connect inputs to the operator - - Returns - -------- - inputs : InputsNodalAveragedEquivalentThermalStrains - """ - return super().inputs - - @property - def outputs(self): - """Enables to get outputs of the operator by evaluationg it - - Returns - -------- - outputs : OutputsNodalAveragedEquivalentThermalStrains - """ - return super().outputs - - -class InputsNodalAveragedEquivalentThermalStrains(_Inputs): - """Intermediate class used to connect user inputs to - nodal_averaged_equivalent_thermal_strains operator. - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - """ - - def __init__(self, op: Operator): - super().__init__(nodal_averaged_equivalent_thermal_strains._spec().inputs, op) - self._time_scoping = Input( - nodal_averaged_equivalent_thermal_strains._spec().input_pin(0), 0, op, -1 - ) - self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( - nodal_averaged_equivalent_thermal_strains._spec().input_pin(1), 1, op, -1 - ) - self._inputs.append(self._mesh_scoping) - self._fields_container = Input( - nodal_averaged_equivalent_thermal_strains._spec().input_pin(2), 2, op, -1 - ) - self._inputs.append(self._fields_container) - self._streams_container = Input( - nodal_averaged_equivalent_thermal_strains._spec().input_pin(3), 3, op, -1 - ) - self._inputs.append(self._streams_container) - self._data_sources = Input( - nodal_averaged_equivalent_thermal_strains._spec().input_pin(4), 4, op, -1 - ) - self._inputs.append(self._data_sources) - self._mesh = Input( - nodal_averaged_equivalent_thermal_strains._spec().input_pin(7), 7, op, -1 - ) - self._inputs.append(self._mesh) - - @property - def time_scoping(self): - """Allows to connect time_scoping input to the operator. 
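Each named input property below is a thin wrapper over a numbered pin from the _spec above, so the generic pin API is interchangeable with the named one (a sketch):

>>> op.connect(0, my_time_scoping)  # same effect as op.inputs.time_scoping.connect(my_time_scoping)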
- - Parameters - ---------- - my_time_scoping : Scoping - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> # or - >>> op.inputs.time_scoping(my_time_scoping) - """ - return self._time_scoping - - @property - def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator. - - Parameters - ---------- - my_mesh_scoping : ScopingsContainer or Scoping - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> # or - >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ - return self._mesh_scoping - - @property - def fields_container(self): - """Allows to connect fields_container input to the operator. - - Fieldscontainer already allocated modified - inplace - - Parameters - ---------- - my_fields_container : FieldsContainer - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> # or - >>> op.inputs.fields_container(my_fields_container) - """ - return self._fields_container - - @property - def streams_container(self): - """Allows to connect streams_container input to the operator. - - Streams containing the result file. - - Parameters - ---------- - my_streams_container : StreamsContainer or Stream - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> # or - >>> op.inputs.streams_container(my_streams_container) - """ - return self._streams_container - - @property - def data_sources(self): - """Allows to connect data_sources input to the operator. - - Data sources containing the result file. - - Parameters - ---------- - my_data_sources : DataSources - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> # or - >>> op.inputs.data_sources(my_data_sources) - """ - return self._data_sources - - @property - def mesh(self): - """Allows to connect mesh input to the operator. - - Parameters - ---------- - my_mesh : MeshedRegion - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains() - >>> op.inputs.mesh.connect(my_mesh) - >>> # or - >>> op.inputs.mesh(my_mesh) - """ - return self._mesh - - -class OutputsNodalAveragedEquivalentThermalStrains(_Outputs): - """Intermediate class used to get outputs from - nodal_averaged_equivalent_thermal_strains operator. - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains() - >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ - - def __init__(self, op: Operator): - super().__init__(nodal_averaged_equivalent_thermal_strains._spec().outputs, op) - self._fields_container = Output( - nodal_averaged_equivalent_thermal_strains._spec().output_pin(0), 0, op - ) - self._outputs.append(self._fields_container) - - @property - def fields_container(self): - """Allows to get fields_container output of the operator - - Returns - ---------- - my_fields_container : FieldsContainer - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_equivalent_thermal_strains() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ # noqa: E501 - return self._fields_container diff --git a/ansys/dpf/core/operators/result/nodal_averaged_plastic_strains.py b/ansys/dpf/core/operators/result/nodal_averaged_plastic_strains.py deleted file mode 100644 index 43155044a04..00000000000 --- a/ansys/dpf/core/operators/result/nodal_averaged_plastic_strains.py +++ /dev/null @@ -1,385 +0,0 @@ -""" -nodal_averaged_plastic_strains -============================== -Autogenerated DPF operator classes. -""" -from warnings import warn -from ansys.dpf.core.dpf_operator import Operator -from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs -from ansys.dpf.core.operators.specification import PinSpecification, Specification - - -class nodal_averaged_plastic_strains(Operator): - """Read nodal averaged plastic strains as averaged nodal result from rst - file. - - Parameters - ---------- - time_scoping : Scoping, optional - mesh_scoping : ScopingsContainer or Scoping, optional - fields_container : FieldsContainer, optional - Fieldscontainer already allocated modified - inplace - streams_container : StreamsContainer or Stream, optional - Streams containing the result file. - data_sources : DataSources - Data sources containing the result file. - mesh : MeshedRegion, optional - - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.nodal_averaged_plastic_strains() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.nodal_averaged_plastic_strains( - ... time_scoping=my_time_scoping, - ... mesh_scoping=my_mesh_scoping, - ... fields_container=my_fields_container, - ... streams_container=my_streams_container, - ... data_sources=my_data_sources, - ... mesh=my_mesh, - ... 
) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container() - """ - - def __init__( - self, - time_scoping=None, - mesh_scoping=None, - fields_container=None, - streams_container=None, - data_sources=None, - mesh=None, - config=None, - server=None, - ): - super().__init__(name="mapdl::rst::NPPL", config=config, server=server) - self._inputs = InputsNodalAveragedPlasticStrains(self) - self._outputs = OutputsNodalAveragedPlasticStrains(self) - if time_scoping is not None: - self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping is not None: - self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container is not None: - self.inputs.fields_container.connect(fields_container) - if streams_container is not None: - self.inputs.streams_container.connect(streams_container) - if data_sources is not None: - self.inputs.data_sources.connect(data_sources) - if mesh is not None: - self.inputs.mesh.connect(mesh) - - @staticmethod - def _spec(): - description = """Read nodal averaged plastic strains as averaged nodal result from rst - file.""" - spec = Specification( - description=description, - map_input_pin_spec={ - 0: PinSpecification( - name="time_scoping", - type_names=["scoping", "vector"], - optional=True, - document="""""", - ), - 1: PinSpecification( - name="mesh_scoping", - type_names=["scopings_container", "scoping", "vector"], - optional=True, - document="""""", - ), - 2: PinSpecification( - name="fields_container", - type_names=["fields_container"], - optional=True, - document="""Fieldscontainer already allocated modified - inplace""", - ), - 3: PinSpecification( - name="streams_container", - type_names=["streams_container", "stream"], - optional=True, - document="""Streams containing the result file.""", - ), - 4: PinSpecification( - name="data_sources", - type_names=["data_sources"], - optional=False, - document="""Data sources containing the result file.""", - ), - 7: PinSpecification( - name="mesh", - type_names=["abstract_meshed_region"], - optional=True, - document="""""", - ), - }, - map_output_pin_spec={ - 0: PinSpecification( - name="fields_container", - type_names=["fields_container"], - optional=False, - document="""Fieldscontainer filled in""", - ), - }, - ) - return spec - - @staticmethod - def default_config(server=None): - """Returns the default config of the operator. - - This config can then be changed to the user needs and be used to - instantiate the operator. The Configuration allows to customize - how the operation will be processed by the operator. - - Parameters - ---------- - server : server.DPFServer, optional - Server with channel connected to the remote or local instance. When - ``None``, attempts to use the global server. - """ - return Operator.default_config(name="mapdl::rst::NPPL", server=server) - - @property - def inputs(self): - """Enables to connect inputs to the operator - - Returns - -------- - inputs : InputsNodalAveragedPlasticStrains - """ - return super().inputs - - @property - def outputs(self): - """Enables to get outputs of the operator by evaluationg it - - Returns - -------- - outputs : OutputsNodalAveragedPlasticStrains - """ - return super().outputs - - -class InputsNodalAveragedPlasticStrains(_Inputs): - """Intermediate class used to connect user inputs to - nodal_averaged_plastic_strains operator. 
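The default_config helper shown above can be paired with generic instantiation to tune how mapdl::rst::NPPL is run; a sketch, assuming the operator stays registered on the connected server:

>>> from ansys.dpf import core as dpf
>>> cfg = dpf.Operator.default_config(name="mapdl::rst::NPPL")
>>> op = dpf.Operator("mapdl::rst::NPPL", config=cfg)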
- - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_plastic_strains() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - """ - - def __init__(self, op: Operator): - super().__init__(nodal_averaged_plastic_strains._spec().inputs, op) - self._time_scoping = Input( - nodal_averaged_plastic_strains._spec().input_pin(0), 0, op, -1 - ) - self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( - nodal_averaged_plastic_strains._spec().input_pin(1), 1, op, -1 - ) - self._inputs.append(self._mesh_scoping) - self._fields_container = Input( - nodal_averaged_plastic_strains._spec().input_pin(2), 2, op, -1 - ) - self._inputs.append(self._fields_container) - self._streams_container = Input( - nodal_averaged_plastic_strains._spec().input_pin(3), 3, op, -1 - ) - self._inputs.append(self._streams_container) - self._data_sources = Input( - nodal_averaged_plastic_strains._spec().input_pin(4), 4, op, -1 - ) - self._inputs.append(self._data_sources) - self._mesh = Input( - nodal_averaged_plastic_strains._spec().input_pin(7), 7, op, -1 - ) - self._inputs.append(self._mesh) - - @property - def time_scoping(self): - """Allows to connect time_scoping input to the operator. - - Parameters - ---------- - my_time_scoping : Scoping - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_plastic_strains() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> # or - >>> op.inputs.time_scoping(my_time_scoping) - """ - return self._time_scoping - - @property - def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator. - - Parameters - ---------- - my_mesh_scoping : ScopingsContainer or Scoping - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_plastic_strains() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> # or - >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ - return self._mesh_scoping - - @property - def fields_container(self): - """Allows to connect fields_container input to the operator. - - Fieldscontainer already allocated modified - inplace - - Parameters - ---------- - my_fields_container : FieldsContainer - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_plastic_strains() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> # or - >>> op.inputs.fields_container(my_fields_container) - """ - return self._fields_container - - @property - def streams_container(self): - """Allows to connect streams_container input to the operator. - - Streams containing the result file. 
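When both streams and data sources are connected, DPF typically favors the streams, which keep the result file open across evaluations instead of reopening it each time. A sketch, assuming a Model built from a local result file (the path is a placeholder, and metadata.streams_provider is assumed available):

>>> from ansys.dpf import core as dpf
>>> model = dpf.Model(r"path/to/file.rst")  # placeholder path
>>> op.inputs.streams_container.connect(model.metadata.streams_provider)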
- - Parameters - ---------- - my_streams_container : StreamsContainer or Stream - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_plastic_strains() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> # or - >>> op.inputs.streams_container(my_streams_container) - """ - return self._streams_container - - @property - def data_sources(self): - """Allows to connect data_sources input to the operator. - - Data sources containing the result file. - - Parameters - ---------- - my_data_sources : DataSources - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_plastic_strains() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> # or - >>> op.inputs.data_sources(my_data_sources) - """ - return self._data_sources - - @property - def mesh(self): - """Allows to connect mesh input to the operator. - - Parameters - ---------- - my_mesh : MeshedRegion - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_plastic_strains() - >>> op.inputs.mesh.connect(my_mesh) - >>> # or - >>> op.inputs.mesh(my_mesh) - """ - return self._mesh - - -class OutputsNodalAveragedPlasticStrains(_Outputs): - """Intermediate class used to get outputs from - nodal_averaged_plastic_strains operator. - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_plastic_strains() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ - - def __init__(self, op: Operator): - super().__init__(nodal_averaged_plastic_strains._spec().outputs, op) - self._fields_container = Output( - nodal_averaged_plastic_strains._spec().output_pin(0), 0, op - ) - self._outputs.append(self._fields_container) - - @property - def fields_container(self): - """Allows to get fields_container output of the operator - - Returns - ---------- - my_fields_container : FieldsContainer - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_plastic_strains() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ # noqa: E501 - return self._fields_container diff --git a/ansys/dpf/core/operators/result/nodal_averaged_thermal_strains.py b/ansys/dpf/core/operators/result/nodal_averaged_thermal_strains.py deleted file mode 100644 index 61a9b1c2a4f..00000000000 --- a/ansys/dpf/core/operators/result/nodal_averaged_thermal_strains.py +++ /dev/null @@ -1,385 +0,0 @@ -""" -nodal_averaged_thermal_strains -============================== -Autogenerated DPF operator classes. -""" -from warnings import warn -from ansys.dpf.core.dpf_operator import Operator -from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs -from ansys.dpf.core.operators.specification import PinSpecification, Specification - - -class nodal_averaged_thermal_strains(Operator): - """Read nodal averaged thermal strains as averaged nodal result from rst - file. - - Parameters - ---------- - time_scoping : Scoping, optional - mesh_scoping : ScopingsContainer or Scoping, optional - fields_container : FieldsContainer, optional - Fieldscontainer already allocated modified - inplace - streams_container : StreamsContainer or Stream, optional - Streams containing the result file. - data_sources : DataSources - Data sources containing the result file. 
- mesh : MeshedRegion, optional - - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.nodal_averaged_thermal_strains() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.nodal_averaged_thermal_strains( - ... time_scoping=my_time_scoping, - ... mesh_scoping=my_mesh_scoping, - ... fields_container=my_fields_container, - ... streams_container=my_streams_container, - ... data_sources=my_data_sources, - ... mesh=my_mesh, - ... ) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container() - """ - - def __init__( - self, - time_scoping=None, - mesh_scoping=None, - fields_container=None, - streams_container=None, - data_sources=None, - mesh=None, - config=None, - server=None, - ): - super().__init__(name="mapdl::rst::NTH", config=config, server=server) - self._inputs = InputsNodalAveragedThermalStrains(self) - self._outputs = OutputsNodalAveragedThermalStrains(self) - if time_scoping is not None: - self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping is not None: - self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container is not None: - self.inputs.fields_container.connect(fields_container) - if streams_container is not None: - self.inputs.streams_container.connect(streams_container) - if data_sources is not None: - self.inputs.data_sources.connect(data_sources) - if mesh is not None: - self.inputs.mesh.connect(mesh) - - @staticmethod - def _spec(): - description = """Read nodal averaged thermal strains as averaged nodal result from rst - file.""" - spec = Specification( - description=description, - map_input_pin_spec={ - 0: PinSpecification( - name="time_scoping", - type_names=["scoping", "vector"], - optional=True, - document="""""", - ), - 1: PinSpecification( - name="mesh_scoping", - type_names=["scopings_container", "scoping", "vector"], - optional=True, - document="""""", - ), - 2: PinSpecification( - name="fields_container", - type_names=["fields_container"], - optional=True, - document="""Fieldscontainer already allocated modified - inplace""", - ), - 3: PinSpecification( - name="streams_container", - type_names=["streams_container", "stream"], - optional=True, - document="""Streams containing the result file.""", - ), - 4: PinSpecification( - name="data_sources", - type_names=["data_sources"], - optional=False, - document="""Data sources containing the result file.""", - ), - 7: PinSpecification( - name="mesh", - type_names=["abstract_meshed_region"], - optional=True, - document="""""", - ), - }, - map_output_pin_spec={ - 0: PinSpecification( - name="fields_container", - type_names=["fields_container"], - optional=False, - document="""Fieldscontainer filled in""", - ), - }, - ) - return spec - - @staticmethod - def default_config(server=None): - """Returns the default config of the operator. 
- - This config can then be changed to the user needs and be used to - instantiate the operator. The Configuration allows to customize - how the operation will be processed by the operator. - - Parameters - ---------- - server : server.DPFServer, optional - Server with channel connected to the remote or local instance. When - ``None``, attempts to use the global server. - """ - return Operator.default_config(name="mapdl::rst::NTH", server=server) - - @property - def inputs(self): - """Enables to connect inputs to the operator - - Returns - -------- - inputs : InputsNodalAveragedThermalStrains - """ - return super().inputs - - @property - def outputs(self): - """Enables to get outputs of the operator by evaluationg it - - Returns - -------- - outputs : OutputsNodalAveragedThermalStrains - """ - return super().outputs - - -class InputsNodalAveragedThermalStrains(_Inputs): - """Intermediate class used to connect user inputs to - nodal_averaged_thermal_strains operator. - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_strains() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - """ - - def __init__(self, op: Operator): - super().__init__(nodal_averaged_thermal_strains._spec().inputs, op) - self._time_scoping = Input( - nodal_averaged_thermal_strains._spec().input_pin(0), 0, op, -1 - ) - self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( - nodal_averaged_thermal_strains._spec().input_pin(1), 1, op, -1 - ) - self._inputs.append(self._mesh_scoping) - self._fields_container = Input( - nodal_averaged_thermal_strains._spec().input_pin(2), 2, op, -1 - ) - self._inputs.append(self._fields_container) - self._streams_container = Input( - nodal_averaged_thermal_strains._spec().input_pin(3), 3, op, -1 - ) - self._inputs.append(self._streams_container) - self._data_sources = Input( - nodal_averaged_thermal_strains._spec().input_pin(4), 4, op, -1 - ) - self._inputs.append(self._data_sources) - self._mesh = Input( - nodal_averaged_thermal_strains._spec().input_pin(7), 7, op, -1 - ) - self._inputs.append(self._mesh) - - @property - def time_scoping(self): - """Allows to connect time_scoping input to the operator. - - Parameters - ---------- - my_time_scoping : Scoping - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_strains() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> # or - >>> op.inputs.time_scoping(my_time_scoping) - """ - return self._time_scoping - - @property - def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator. 
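A mesh_scoping for this pin can be built by hand as a nodal Scoping; a minimal sketch with hypothetical node ids:

>>> from ansys.dpf import core as dpf
>>> my_mesh_scoping = dpf.Scoping()
>>> my_mesh_scoping.location = dpf.locations.nodal
>>> my_mesh_scoping.ids = [1, 2, 3]  # hypothetical node ids
>>> op.inputs.mesh_scoping.connect(my_mesh_scoping)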
- - Parameters - ---------- - my_mesh_scoping : ScopingsContainer or Scoping - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_strains() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> # or - >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ - return self._mesh_scoping - - @property - def fields_container(self): - """Allows to connect fields_container input to the operator. - - Fieldscontainer already allocated modified - inplace - - Parameters - ---------- - my_fields_container : FieldsContainer - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_strains() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> # or - >>> op.inputs.fields_container(my_fields_container) - """ - return self._fields_container - - @property - def streams_container(self): - """Allows to connect streams_container input to the operator. - - Streams containing the result file. - - Parameters - ---------- - my_streams_container : StreamsContainer or Stream - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_strains() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> # or - >>> op.inputs.streams_container(my_streams_container) - """ - return self._streams_container - - @property - def data_sources(self): - """Allows to connect data_sources input to the operator. - - Data sources containing the result file. - - Parameters - ---------- - my_data_sources : DataSources - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_strains() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> # or - >>> op.inputs.data_sources(my_data_sources) - """ - return self._data_sources - - @property - def mesh(self): - """Allows to connect mesh input to the operator. - - Parameters - ---------- - my_mesh : MeshedRegion - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_strains() - >>> op.inputs.mesh.connect(my_mesh) - >>> # or - >>> op.inputs.mesh(my_mesh) - """ - return self._mesh - - -class OutputsNodalAveragedThermalStrains(_Outputs): - """Intermediate class used to get outputs from - nodal_averaged_thermal_strains operator. - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_strains() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ - - def __init__(self, op: Operator): - super().__init__(nodal_averaged_thermal_strains._spec().outputs, op) - self._fields_container = Output( - nodal_averaged_thermal_strains._spec().output_pin(0), 0, op - ) - self._outputs.append(self._fields_container) - - @property - def fields_container(self): - """Allows to get fields_container output of the operator - - Returns - ---------- - my_fields_container : FieldsContainer - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_strains() - >>> # Connect inputs : op.inputs. ... 
- >>> result_fields_container = op.outputs.fields_container() - """ # noqa: E501 - return self._fields_container diff --git a/ansys/dpf/core/operators/result/nodal_averaged_thermal_swelling_strains.py b/ansys/dpf/core/operators/result/nodal_averaged_thermal_swelling_strains.py deleted file mode 100644 index a20bb4d277c..00000000000 --- a/ansys/dpf/core/operators/result/nodal_averaged_thermal_swelling_strains.py +++ /dev/null @@ -1,385 +0,0 @@ -""" -nodal_averaged_thermal_swelling_strains -======================================= -Autogenerated DPF operator classes. -""" -from warnings import warn -from ansys.dpf.core.dpf_operator import Operator -from ansys.dpf.core.inputs import Input, _Inputs -from ansys.dpf.core.outputs import Output, _Outputs -from ansys.dpf.core.operators.specification import PinSpecification, Specification - - -class nodal_averaged_thermal_swelling_strains(Operator): - """Read nodal averaged thermal swelling strains as averaged nodal result - from rst file. - - Parameters - ---------- - time_scoping : Scoping, optional - mesh_scoping : ScopingsContainer or Scoping, optional - fields_container : FieldsContainer, optional - Fieldscontainer already allocated modified - inplace - streams_container : StreamsContainer or Stream, optional - Streams containing the result file. - data_sources : DataSources - Data sources containing the result file. - mesh : MeshedRegion, optional - - - Examples - -------- - >>> from ansys.dpf import core as dpf - - >>> # Instantiate operator - >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains() - - >>> # Make input connections - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - - >>> # Instantiate operator and connect inputs in one line - >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains( - ... time_scoping=my_time_scoping, - ... mesh_scoping=my_mesh_scoping, - ... fields_container=my_fields_container, - ... streams_container=my_streams_container, - ... data_sources=my_data_sources, - ... mesh=my_mesh, - ... 
) - - >>> # Get output data - >>> result_fields_container = op.outputs.fields_container() - """ - - def __init__( - self, - time_scoping=None, - mesh_scoping=None, - fields_container=None, - streams_container=None, - data_sources=None, - mesh=None, - config=None, - server=None, - ): - super().__init__(name="mapdl::rst::NTH_SWL", config=config, server=server) - self._inputs = InputsNodalAveragedThermalSwellingStrains(self) - self._outputs = OutputsNodalAveragedThermalSwellingStrains(self) - if time_scoping is not None: - self.inputs.time_scoping.connect(time_scoping) - if mesh_scoping is not None: - self.inputs.mesh_scoping.connect(mesh_scoping) - if fields_container is not None: - self.inputs.fields_container.connect(fields_container) - if streams_container is not None: - self.inputs.streams_container.connect(streams_container) - if data_sources is not None: - self.inputs.data_sources.connect(data_sources) - if mesh is not None: - self.inputs.mesh.connect(mesh) - - @staticmethod - def _spec(): - description = """Read nodal averaged thermal swelling strains as averaged nodal result - from rst file.""" - spec = Specification( - description=description, - map_input_pin_spec={ - 0: PinSpecification( - name="time_scoping", - type_names=["scoping", "vector"], - optional=True, - document="""""", - ), - 1: PinSpecification( - name="mesh_scoping", - type_names=["scopings_container", "scoping", "vector"], - optional=True, - document="""""", - ), - 2: PinSpecification( - name="fields_container", - type_names=["fields_container"], - optional=True, - document="""Fieldscontainer already allocated modified - inplace""", - ), - 3: PinSpecification( - name="streams_container", - type_names=["streams_container", "stream"], - optional=True, - document="""Streams containing the result file.""", - ), - 4: PinSpecification( - name="data_sources", - type_names=["data_sources"], - optional=False, - document="""Data sources containing the result file.""", - ), - 7: PinSpecification( - name="mesh", - type_names=["abstract_meshed_region"], - optional=True, - document="""""", - ), - }, - map_output_pin_spec={ - 0: PinSpecification( - name="fields_container", - type_names=["fields_container"], - optional=False, - document="""Fieldscontainer filled in""", - ), - }, - ) - return spec - - @staticmethod - def default_config(server=None): - """Returns the default config of the operator. - - This config can then be changed to the user needs and be used to - instantiate the operator. The Configuration allows to customize - how the operation will be processed by the operator. - - Parameters - ---------- - server : server.DPFServer, optional - Server with channel connected to the remote or local instance. When - ``None``, attempts to use the global server. - """ - return Operator.default_config(name="mapdl::rst::NTH_SWL", server=server) - - @property - def inputs(self): - """Enables to connect inputs to the operator - - Returns - -------- - inputs : InputsNodalAveragedThermalSwellingStrains - """ - return super().inputs - - @property - def outputs(self): - """Enables to get outputs of the operator by evaluationg it - - Returns - -------- - outputs : OutputsNodalAveragedThermalSwellingStrains - """ - return super().outputs - - -class InputsNodalAveragedThermalSwellingStrains(_Inputs): - """Intermediate class used to connect user inputs to - nodal_averaged_thermal_swelling_strains operator. 
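Because pin 0 accepts either a scoping or a plain vector, time_scoping can also be fed a bare list of time-set ids (a sketch with hypothetical ids):

>>> op.inputs.time_scoping.connect([1, 2])  # time sets 1 and 2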
- - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains() - >>> my_time_scoping = dpf.Scoping() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> my_mesh_scoping = dpf.ScopingsContainer() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> my_fields_container = dpf.FieldsContainer() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> my_streams_container = dpf.StreamsContainer() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> my_data_sources = dpf.DataSources() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> my_mesh = dpf.MeshedRegion() - >>> op.inputs.mesh.connect(my_mesh) - """ - - def __init__(self, op: Operator): - super().__init__(nodal_averaged_thermal_swelling_strains._spec().inputs, op) - self._time_scoping = Input( - nodal_averaged_thermal_swelling_strains._spec().input_pin(0), 0, op, -1 - ) - self._inputs.append(self._time_scoping) - self._mesh_scoping = Input( - nodal_averaged_thermal_swelling_strains._spec().input_pin(1), 1, op, -1 - ) - self._inputs.append(self._mesh_scoping) - self._fields_container = Input( - nodal_averaged_thermal_swelling_strains._spec().input_pin(2), 2, op, -1 - ) - self._inputs.append(self._fields_container) - self._streams_container = Input( - nodal_averaged_thermal_swelling_strains._spec().input_pin(3), 3, op, -1 - ) - self._inputs.append(self._streams_container) - self._data_sources = Input( - nodal_averaged_thermal_swelling_strains._spec().input_pin(4), 4, op, -1 - ) - self._inputs.append(self._data_sources) - self._mesh = Input( - nodal_averaged_thermal_swelling_strains._spec().input_pin(7), 7, op, -1 - ) - self._inputs.append(self._mesh) - - @property - def time_scoping(self): - """Allows to connect time_scoping input to the operator. - - Parameters - ---------- - my_time_scoping : Scoping - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains() - >>> op.inputs.time_scoping.connect(my_time_scoping) - >>> # or - >>> op.inputs.time_scoping(my_time_scoping) - """ - return self._time_scoping - - @property - def mesh_scoping(self): - """Allows to connect mesh_scoping input to the operator. - - Parameters - ---------- - my_mesh_scoping : ScopingsContainer or Scoping - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains() - >>> op.inputs.mesh_scoping.connect(my_mesh_scoping) - >>> # or - >>> op.inputs.mesh_scoping(my_mesh_scoping) - """ - return self._mesh_scoping - - @property - def fields_container(self): - """Allows to connect fields_container input to the operator. - - Fieldscontainer already allocated modified - inplace - - Parameters - ---------- - my_fields_container : FieldsContainer - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains() - >>> op.inputs.fields_container.connect(my_fields_container) - >>> # or - >>> op.inputs.fields_container(my_fields_container) - """ - return self._fields_container - - @property - def streams_container(self): - """Allows to connect streams_container input to the operator. - - Streams containing the result file. 
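Pin 2 accepts a FieldsContainer that is already allocated and is then modified in place, so a caller can reuse one container across repeated evaluations (a sketch):

>>> from ansys.dpf import core as dpf
>>> fc_in = dpf.FieldsContainer()
>>> op.inputs.fields_container.connect(fc_in)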
- - Parameters - ---------- - my_streams_container : StreamsContainer or Stream - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains() - >>> op.inputs.streams_container.connect(my_streams_container) - >>> # or - >>> op.inputs.streams_container(my_streams_container) - """ - return self._streams_container - - @property - def data_sources(self): - """Allows to connect data_sources input to the operator. - - Data sources containing the result file. - - Parameters - ---------- - my_data_sources : DataSources - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains() - >>> op.inputs.data_sources.connect(my_data_sources) - >>> # or - >>> op.inputs.data_sources(my_data_sources) - """ - return self._data_sources - - @property - def mesh(self): - """Allows to connect mesh input to the operator. - - Parameters - ---------- - my_mesh : MeshedRegion - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains() - >>> op.inputs.mesh.connect(my_mesh) - >>> # or - >>> op.inputs.mesh(my_mesh) - """ - return self._mesh - - -class OutputsNodalAveragedThermalSwellingStrains(_Outputs): - """Intermediate class used to get outputs from - nodal_averaged_thermal_swelling_strains operator. - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ - - def __init__(self, op: Operator): - super().__init__(nodal_averaged_thermal_swelling_strains._spec().outputs, op) - self._fields_container = Output( - nodal_averaged_thermal_swelling_strains._spec().output_pin(0), 0, op - ) - self._outputs.append(self._fields_container) - - @property - def fields_container(self): - """Allows to get fields_container output of the operator - - Returns - ---------- - my_fields_container : FieldsContainer - - Examples - -------- - >>> from ansys.dpf import core as dpf - >>> op = dpf.operators.result.nodal_averaged_thermal_swelling_strains() - >>> # Connect inputs : op.inputs. ... - >>> result_fields_container = op.outputs.fields_container() - """ # noqa: E501 - return self._fields_container diff --git a/ansys/dpf/core/operators/result/nodal_to_global.py b/ansys/dpf/core/operators/result/nodal_to_global.py index f3bc85b6a32..0f5b8dcec44 100644 --- a/ansys/dpf/core/operators/result/nodal_to_global.py +++ b/ansys/dpf/core/operators/result/nodal_to_global.py @@ -11,8 +11,7 @@ class nodal_to_global(Operator): - """Rotate results from nodal coordinate system to global coordinate - system. 
+ """Rotates nodal elemental results to global coordinate system Parameters ---------- @@ -49,7 +48,9 @@ class nodal_to_global(Operator): """ def __init__(self, fieldA=None, fieldB=None, config=None, server=None): - super().__init__(name="NodalToGlobal", config=config, server=server) + super().__init__( + name="NodalElementalResultsRotation", config=config, server=server + ) self._inputs = InputsNodalToGlobal(self) self._outputs = OutputsNodalToGlobal(self) if fieldA is not None: @@ -59,8 +60,7 @@ def __init__(self, fieldA=None, fieldB=None, config=None, server=None): @staticmethod def _spec(): - description = """Rotate results from nodal coordinate system to global coordinate - system.""" + description = """Rotates nodal elemental results to global coordinate system""" spec = Specification( description=description, map_input_pin_spec={ @@ -105,7 +105,9 @@ def default_config(server=None): Server with channel connected to the remote or local instance. When ``None``, attempts to use the global server. """ - return Operator.default_config(name="NodalToGlobal", server=server) + return Operator.default_config( + name="NodalElementalResultsRotation", server=server + ) @property def inputs(self): diff --git a/ansys/dpf/core/operators/result/plastic_state_variable.py b/ansys/dpf/core/operators/result/plastic_state_variable.py index 4e4467669de..fac37ff2c31 100644 --- a/ansys/dpf/core/operators/result/plastic_state_variable.py +++ b/ansys/dpf/core/operators/result/plastic_state_variable.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -358,7 +358,7 @@ def __init__(self, op: Operator): ) self._inputs.append(self._read_cyclic) self._read_beams = Input( - plastic_state_variable._spec().input_pin(21), 21, op, -1 + plastic_state_variable._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) diff --git a/ansys/dpf/core/operators/result/plastic_strain.py b/ansys/dpf/core/operators/result/plastic_strain.py index 4ee8242b0aa..bfcc1188588 100644 --- a/ansys/dpf/core/operators/result/plastic_strain.py +++ b/ansys/dpf/core/operators/result/plastic_strain.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -343,7 +343,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(plastic_strain._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(plastic_strain._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(plastic_strain._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/plastic_strain_X.py b/ansys/dpf/core/operators/result/plastic_strain_X.py index 64527c1baf9..3effb2d74bb 100644 --- a/ansys/dpf/core/operators/result/plastic_strain_X.py +++ b/ansys/dpf/core/operators/result/plastic_strain_X.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -347,7 +347,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(plastic_strain_X._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = 
Input(plastic_strain_X._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(plastic_strain_X._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/plastic_strain_XY.py b/ansys/dpf/core/operators/result/plastic_strain_XY.py index e30a5c54ca1..3ade5c77584 100644 --- a/ansys/dpf/core/operators/result/plastic_strain_XY.py +++ b/ansys/dpf/core/operators/result/plastic_strain_XY.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -349,7 +349,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(plastic_strain_XY._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(plastic_strain_XY._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(plastic_strain_XY._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/plastic_strain_XZ.py b/ansys/dpf/core/operators/result/plastic_strain_XZ.py index d180e5c72f9..7c8c8226fe6 100644 --- a/ansys/dpf/core/operators/result/plastic_strain_XZ.py +++ b/ansys/dpf/core/operators/result/plastic_strain_XZ.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -349,7 +349,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(plastic_strain_XZ._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(plastic_strain_XZ._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(plastic_strain_XZ._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/plastic_strain_Y.py b/ansys/dpf/core/operators/result/plastic_strain_Y.py index c2f00928dab..e4abe7c7fcc 100644 --- a/ansys/dpf/core/operators/result/plastic_strain_Y.py +++ b/ansys/dpf/core/operators/result/plastic_strain_Y.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -347,7 +347,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(plastic_strain_Y._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(plastic_strain_Y._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(plastic_strain_Y._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/plastic_strain_YZ.py b/ansys/dpf/core/operators/result/plastic_strain_YZ.py index 4a8893767f6..a6a15b505d1 100644 --- a/ansys/dpf/core/operators/result/plastic_strain_YZ.py +++ b/ansys/dpf/core/operators/result/plastic_strain_YZ.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -349,7 +349,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(plastic_strain_YZ._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = 
Input(plastic_strain_YZ._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(plastic_strain_YZ._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/plastic_strain_Z.py b/ansys/dpf/core/operators/result/plastic_strain_Z.py index 8f993bab493..962f638abd6 100644 --- a/ansys/dpf/core/operators/result/plastic_strain_Z.py +++ b/ansys/dpf/core/operators/result/plastic_strain_Z.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -347,7 +347,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(plastic_strain_Z._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(plastic_strain_Z._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(plastic_strain_Z._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/plastic_strain_energy_density.py b/ansys/dpf/core/operators/result/plastic_strain_energy_density.py index d18ad3754b0..c4a7465c478 100644 --- a/ansys/dpf/core/operators/result/plastic_strain_energy_density.py +++ b/ansys/dpf/core/operators/result/plastic_strain_energy_density.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -360,7 +360,7 @@ def __init__(self, op: Operator): ) self._inputs.append(self._read_cyclic) self._read_beams = Input( - plastic_strain_energy_density._spec().input_pin(21), 21, op, -1 + plastic_strain_energy_density._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) diff --git a/ansys/dpf/core/operators/result/plastic_strain_principal_1.py b/ansys/dpf/core/operators/result/plastic_strain_principal_1.py index c2ee2825d9d..f082f0a2a4b 100644 --- a/ansys/dpf/core/operators/result/plastic_strain_principal_1.py +++ b/ansys/dpf/core/operators/result/plastic_strain_principal_1.py @@ -230,7 +230,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -353,7 +353,7 @@ def __init__(self, op: Operator): ) self._inputs.append(self._read_cyclic) self._read_beams = Input( - plastic_strain_principal_1._spec().input_pin(21), 21, op, -1 + plastic_strain_principal_1._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) diff --git a/ansys/dpf/core/operators/result/plastic_strain_principal_2.py b/ansys/dpf/core/operators/result/plastic_strain_principal_2.py index b00c00bca2f..cac7727474d 100644 --- a/ansys/dpf/core/operators/result/plastic_strain_principal_2.py +++ b/ansys/dpf/core/operators/result/plastic_strain_principal_2.py @@ -230,7 +230,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -353,7 +353,7 @@ def __init__(self, op: Operator): ) self._inputs.append(self._read_cyclic) self._read_beams = Input( - plastic_strain_principal_2._spec().input_pin(21), 21, op, -1 + plastic_strain_principal_2._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) diff --git a/ansys/dpf/core/operators/result/plastic_strain_principal_3.py 
b/ansys/dpf/core/operators/result/plastic_strain_principal_3.py index 09e28115e9a..c39934759eb 100644 --- a/ansys/dpf/core/operators/result/plastic_strain_principal_3.py +++ b/ansys/dpf/core/operators/result/plastic_strain_principal_3.py @@ -230,7 +230,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -353,7 +353,7 @@ def __init__(self, op: Operator): ) self._inputs.append(self._read_cyclic) self._read_beams = Input( - plastic_strain_principal_3._spec().input_pin(21), 21, op, -1 + plastic_strain_principal_3._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) diff --git a/ansys/dpf/core/operators/result/stress.py b/ansys/dpf/core/operators/result/stress.py index 3ef26f89151..216b8182d26 100644 --- a/ansys/dpf/core/operators/result/stress.py +++ b/ansys/dpf/core/operators/result/stress.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -341,7 +341,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(stress._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(stress._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(stress._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/stress_X.py b/ansys/dpf/core/operators/result/stress_X.py index dc6c2865cb7..7ecd40754df 100644 --- a/ansys/dpf/core/operators/result/stress_X.py +++ b/ansys/dpf/core/operators/result/stress_X.py @@ -234,7 +234,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -340,7 +340,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(stress_X._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(stress_X._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(stress_X._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/stress_XY.py b/ansys/dpf/core/operators/result/stress_XY.py index fa5b1cc1c8e..93fc3eb7ddc 100644 --- a/ansys/dpf/core/operators/result/stress_XY.py +++ b/ansys/dpf/core/operators/result/stress_XY.py @@ -234,7 +234,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -340,7 +340,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(stress_XY._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(stress_XY._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(stress_XY._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/stress_XZ.py b/ansys/dpf/core/operators/result/stress_XZ.py index d7c6a7ea801..db35b9805e0 100644 --- a/ansys/dpf/core/operators/result/stress_XZ.py +++ b/ansys/dpf/core/operators/result/stress_XZ.py @@ -234,7 +234,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( 
name="read_beams", type_names=["bool"], optional=True, @@ -340,7 +340,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(stress_XZ._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(stress_XZ._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(stress_XZ._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/stress_Y.py b/ansys/dpf/core/operators/result/stress_Y.py index 03a05301a48..045cefacb46 100644 --- a/ansys/dpf/core/operators/result/stress_Y.py +++ b/ansys/dpf/core/operators/result/stress_Y.py @@ -234,7 +234,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -340,7 +340,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(stress_Y._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(stress_Y._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(stress_Y._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/stress_YZ.py b/ansys/dpf/core/operators/result/stress_YZ.py index 51f3215c81f..2545326c5ac 100644 --- a/ansys/dpf/core/operators/result/stress_YZ.py +++ b/ansys/dpf/core/operators/result/stress_YZ.py @@ -234,7 +234,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -340,7 +340,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(stress_YZ._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(stress_YZ._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(stress_YZ._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/stress_Z.py b/ansys/dpf/core/operators/result/stress_Z.py index 4f86e364ee7..9f2c2aa7466 100644 --- a/ansys/dpf/core/operators/result/stress_Z.py +++ b/ansys/dpf/core/operators/result/stress_Z.py @@ -234,7 +234,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -340,7 +340,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(stress_Z._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(stress_Z._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(stress_Z._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/stress_principal_1.py b/ansys/dpf/core/operators/result/stress_principal_1.py index 06df150f495..760dc8e0f86 100644 --- a/ansys/dpf/core/operators/result/stress_principal_1.py +++ b/ansys/dpf/core/operators/result/stress_principal_1.py @@ -230,7 +230,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -344,7 +344,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = 
Input(stress_principal_1._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(stress_principal_1._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(stress_principal_1._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/stress_principal_2.py b/ansys/dpf/core/operators/result/stress_principal_2.py index 593817eb62d..15d44210071 100644 --- a/ansys/dpf/core/operators/result/stress_principal_2.py +++ b/ansys/dpf/core/operators/result/stress_principal_2.py @@ -230,7 +230,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -344,7 +344,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(stress_principal_2._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(stress_principal_2._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(stress_principal_2._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/stress_principal_3.py b/ansys/dpf/core/operators/result/stress_principal_3.py index 58b27dc9d94..7dd2823f2ec 100644 --- a/ansys/dpf/core/operators/result/stress_principal_3.py +++ b/ansys/dpf/core/operators/result/stress_principal_3.py @@ -230,7 +230,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -344,7 +344,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(stress_principal_3._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(stress_principal_3._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(stress_principal_3._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/stress_ratio.py b/ansys/dpf/core/operators/result/stress_ratio.py index 4de45cec82a..cfb7410c101 100644 --- a/ansys/dpf/core/operators/result/stress_ratio.py +++ b/ansys/dpf/core/operators/result/stress_ratio.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -343,7 +343,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(stress_ratio._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(stress_ratio._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(stress_ratio._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/stress_von_mises.py b/ansys/dpf/core/operators/result/stress_von_mises.py index bc54dd45686..7f6d0df3310 100644 --- a/ansys/dpf/core/operators/result/stress_von_mises.py +++ b/ansys/dpf/core/operators/result/stress_von_mises.py @@ -228,7 +228,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -340,7 +340,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = 
Input(stress_von_mises._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(stress_von_mises._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(stress_von_mises._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/structural_temperature.py b/ansys/dpf/core/operators/result/structural_temperature.py index f67ae67c8e3..f45fe553cd3 100644 --- a/ansys/dpf/core/operators/result/structural_temperature.py +++ b/ansys/dpf/core/operators/result/structural_temperature.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -358,7 +358,7 @@ def __init__(self, op: Operator): ) self._inputs.append(self._read_cyclic) self._read_beams = Input( - structural_temperature._spec().input_pin(21), 21, op, -1 + structural_temperature._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) diff --git a/ansys/dpf/core/operators/result/swelling_strains.py b/ansys/dpf/core/operators/result/swelling_strains.py index 1ffc120effd..3192a7151a3 100644 --- a/ansys/dpf/core/operators/result/swelling_strains.py +++ b/ansys/dpf/core/operators/result/swelling_strains.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -347,7 +347,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(swelling_strains._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(swelling_strains._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(swelling_strains._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/temperature_grad.py b/ansys/dpf/core/operators/result/temperature_grad.py index a9cf88a4b09..841a4dfaf5c 100644 --- a/ansys/dpf/core/operators/result/temperature_grad.py +++ b/ansys/dpf/core/operators/result/temperature_grad.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -347,7 +347,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(temperature_grad._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(temperature_grad._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(temperature_grad._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/thermal_strain.py b/ansys/dpf/core/operators/result/thermal_strain.py index 1708a6b5947..25e2bb8f70e 100644 --- a/ansys/dpf/core/operators/result/thermal_strain.py +++ b/ansys/dpf/core/operators/result/thermal_strain.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -343,7 +343,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(thermal_strain._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = 
Input(thermal_strain._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(thermal_strain._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/thermal_strain_X.py b/ansys/dpf/core/operators/result/thermal_strain_X.py index 2c5f52360fa..1e310dc3b4e 100644 --- a/ansys/dpf/core/operators/result/thermal_strain_X.py +++ b/ansys/dpf/core/operators/result/thermal_strain_X.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -347,7 +347,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(thermal_strain_X._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(thermal_strain_X._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(thermal_strain_X._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/thermal_strain_XY.py b/ansys/dpf/core/operators/result/thermal_strain_XY.py index 66a5f118e3d..a6d37a7fd1e 100644 --- a/ansys/dpf/core/operators/result/thermal_strain_XY.py +++ b/ansys/dpf/core/operators/result/thermal_strain_XY.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -349,7 +349,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(thermal_strain_XY._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(thermal_strain_XY._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(thermal_strain_XY._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/thermal_strain_XZ.py b/ansys/dpf/core/operators/result/thermal_strain_XZ.py index 47897e848d6..4e92bee6768 100644 --- a/ansys/dpf/core/operators/result/thermal_strain_XZ.py +++ b/ansys/dpf/core/operators/result/thermal_strain_XZ.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -349,7 +349,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(thermal_strain_XZ._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(thermal_strain_XZ._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(thermal_strain_XZ._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/thermal_strain_Y.py b/ansys/dpf/core/operators/result/thermal_strain_Y.py index b29ccae9f23..d580da02adf 100644 --- a/ansys/dpf/core/operators/result/thermal_strain_Y.py +++ b/ansys/dpf/core/operators/result/thermal_strain_Y.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -347,7 +347,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(thermal_strain_Y._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = 
Input(thermal_strain_Y._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(thermal_strain_Y._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/thermal_strain_YZ.py b/ansys/dpf/core/operators/result/thermal_strain_YZ.py index b5a3edf42d6..f0783f8ee22 100644 --- a/ansys/dpf/core/operators/result/thermal_strain_YZ.py +++ b/ansys/dpf/core/operators/result/thermal_strain_YZ.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -349,7 +349,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(thermal_strain_YZ._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(thermal_strain_YZ._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(thermal_strain_YZ._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/thermal_strain_Z.py b/ansys/dpf/core/operators/result/thermal_strain_Z.py index 8fafb79a718..096e203302b 100644 --- a/ansys/dpf/core/operators/result/thermal_strain_Z.py +++ b/ansys/dpf/core/operators/result/thermal_strain_Z.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -347,7 +347,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(thermal_strain_Z._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(thermal_strain_Z._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(thermal_strain_Z._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/result/thermal_strain_principal_1.py b/ansys/dpf/core/operators/result/thermal_strain_principal_1.py index dbd5d5ff662..6c6f43d5976 100644 --- a/ansys/dpf/core/operators/result/thermal_strain_principal_1.py +++ b/ansys/dpf/core/operators/result/thermal_strain_principal_1.py @@ -230,7 +230,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -353,7 +353,7 @@ def __init__(self, op: Operator): ) self._inputs.append(self._read_cyclic) self._read_beams = Input( - thermal_strain_principal_1._spec().input_pin(21), 21, op, -1 + thermal_strain_principal_1._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) diff --git a/ansys/dpf/core/operators/result/thermal_strain_principal_2.py b/ansys/dpf/core/operators/result/thermal_strain_principal_2.py index f070aa47ab0..bc2f470cc34 100644 --- a/ansys/dpf/core/operators/result/thermal_strain_principal_2.py +++ b/ansys/dpf/core/operators/result/thermal_strain_principal_2.py @@ -230,7 +230,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -353,7 +353,7 @@ def __init__(self, op: Operator): ) self._inputs.append(self._read_cyclic) self._read_beams = Input( - thermal_strain_principal_2._spec().input_pin(21), 21, op, -1 + thermal_strain_principal_2._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) diff --git 
a/ansys/dpf/core/operators/result/thermal_strain_principal_3.py b/ansys/dpf/core/operators/result/thermal_strain_principal_3.py index b7f3a67f06f..3a9d5684cb7 100644 --- a/ansys/dpf/core/operators/result/thermal_strain_principal_3.py +++ b/ansys/dpf/core/operators/result/thermal_strain_principal_3.py @@ -230,7 +230,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -353,7 +353,7 @@ def __init__(self, op: Operator): ) self._inputs.append(self._read_cyclic) self._read_beams = Input( - thermal_strain_principal_3._spec().input_pin(21), 21, op, -1 + thermal_strain_principal_3._spec().input_pin(22), 22, op, -1 ) self._inputs.append(self._read_beams) diff --git a/ansys/dpf/core/operators/result/thermal_strains_eqv.py b/ansys/dpf/core/operators/result/thermal_strains_eqv.py index e128be252f9..b7cf4ce2fbc 100644 --- a/ansys/dpf/core/operators/result/thermal_strains_eqv.py +++ b/ansys/dpf/core/operators/result/thermal_strains_eqv.py @@ -235,7 +235,7 @@ def _spec(): done and stages are merged (default is 1)""", ), - 21: PinSpecification( + 22: PinSpecification( name="read_beams", type_names=["bool"], optional=True, @@ -349,7 +349,7 @@ def __init__(self, op: Operator): self._inputs.append(self._requested_location) self._read_cyclic = Input(thermal_strains_eqv._spec().input_pin(14), 14, op, -1) self._inputs.append(self._read_cyclic) - self._read_beams = Input(thermal_strains_eqv._spec().input_pin(21), 21, op, -1) + self._read_beams = Input(thermal_strains_eqv._spec().input_pin(22), 22, op, -1) self._inputs.append(self._read_beams) @property diff --git a/ansys/dpf/core/operators/scoping/split_on_property_type.py b/ansys/dpf/core/operators/scoping/split_on_property_type.py index 53666fdd676..b105a0de87a 100644 --- a/ansys/dpf/core/operators/scoping/split_on_property_type.py +++ b/ansys/dpf/core/operators/scoping/split_on_property_type.py @@ -23,6 +23,11 @@ class split_on_property_type(Operator): Mesh region requested_location : str Location (default is elemental) + skin_case : bool, optional + Set to 0: to have skin elements in their own + group, 1: merge skin and solid + elements, 2: merge skin and shell + elements (default) label1 : str, optional Properties to apply the filtering 'mat' and/or 'elshape' (default is @@ -47,6 +52,8 @@ class split_on_property_type(Operator): >>> op.inputs.mesh.connect(my_mesh) >>> my_requested_location = str() >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_skin_case = bool() + >>> op.inputs.skin_case.connect(my_skin_case) >>> my_label1 = str() >>> op.inputs.label1.connect(my_label1) >>> my_label2 = str() @@ -57,6 +64,7 @@ class split_on_property_type(Operator): ... mesh_scoping=my_mesh_scoping, ... mesh=my_mesh, ... requested_location=my_requested_location, + ... skin_case=my_skin_case, ... label1=my_label1, ... label2=my_label2, ... 
) @@ -70,6 +78,7 @@ def __init__( mesh_scoping=None, mesh=None, requested_location=None, + skin_case=None, label1=None, label2=None, config=None, @@ -84,6 +93,8 @@ def __init__( self.inputs.mesh.connect(mesh) if requested_location is not None: self.inputs.requested_location.connect(requested_location) + if skin_case is not None: + self.inputs.skin_case.connect(skin_case) if label1 is not None: self.inputs.label1.connect(label1) if label2 is not None: @@ -115,6 +126,15 @@ def _spec(): optional=False, document="""Location (default is elemental)""", ), + 12: PinSpecification( + name="skin_case", + type_names=["bool"], + optional=True, + document="""Set to 0: to have skin elements in their own + group, 1: merge skin and solid + elements, 2: merge skin and shell + elements (default)""", + ), 13: PinSpecification( name="label", type_names=["string"], @@ -194,6 +214,8 @@ class InputsSplitOnPropertyType(_Inputs): >>> op.inputs.mesh.connect(my_mesh) >>> my_requested_location = str() >>> op.inputs.requested_location.connect(my_requested_location) + >>> my_skin_case = bool() + >>> op.inputs.skin_case.connect(my_skin_case) >>> my_label1 = str() >>> op.inputs.label1.connect(my_label1) >>> my_label2 = str() @@ -212,6 +234,10 @@ def __init__(self, op: Operator): split_on_property_type._spec().input_pin(9), 9, op, -1 ) self._inputs.append(self._requested_location) + self._skin_case = Input( + split_on_property_type._spec().input_pin(12), 12, op, -1 + ) + self._inputs.append(self._skin_case) self._label1 = Input(split_on_property_type._spec().input_pin(13), 13, op, 0) self._inputs.append(self._label1) self._label2 = Input(split_on_property_type._spec().input_pin(14), 14, op, 1) @@ -277,6 +303,29 @@ def requested_location(self): """ return self._requested_location + @property + def skin_case(self): + """Allows to connect skin_case input to the operator. + + Set to 0: to have skin elements in their own + group, 1: merge skin and solid + elements, 2: merge skin and shell + elements (default) + + Parameters + ---------- + my_skin_case : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.scoping.split_on_property_type() + >>> op.inputs.skin_case.connect(my_skin_case) + >>> # or + >>> op.inputs.skin_case(my_skin_case) + """ + return self._skin_case + @property def label1(self): """Allows to connect label1 input to the operator. 
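
Note on the two API changes above, since both affect user scripts: the result providers touched earlier now expose `read_beams` on pin 22 instead of pin 21, which breaks code that connected the option by pin number (the generated `inputs.read_beams` accessors pick up the new pin automatically), and `split_on_property_type` gains a `skin_case` pin that is declared with type `bool` but documented with the values 0, 1 and 2, so an integer is connected in the sketch below. A minimal sketch, assuming a reachable DPF server; the result file name `my_result.rst` is a placeholder, not part of this PR:

from ansys.dpf import core as dpf

model = dpf.Model("my_result.rst")  # hypothetical result file, for illustration only

# New skin_case input (pin 12) of split_on_property_type:
split_op = dpf.operators.scoping.split_on_property_type()
split_op.inputs.mesh.connect(model.metadata.meshed_region)
split_op.inputs.requested_location.connect("elemental")
split_op.inputs.skin_case.connect(2)  # 2: merge skin and shell elements (the documented default)

# read_beams moved from pin 21 to pin 22, so positional connections change:
stress_op = dpf.operators.result.stress(data_sources=model.metadata.data_sources)
stress_op.connect(22, True)  # was stress_op.connect(21, True) before this PR
# The named accessor follows the new pin specification transparently:
stress_op.inputs.read_beams.connect(True)
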
diff --git a/ansys/dpf/core/operators/serialization/__init__.py b/ansys/dpf/core/operators/serialization/__init__.py index 1c152fb6c52..cd613b0578c 100644 --- a/ansys/dpf/core/operators/serialization/__init__.py +++ b/ansys/dpf/core/operators/serialization/__init__.py @@ -1,9 +1,11 @@ +from .export_symbolic_workflow import export_symbolic_workflow from .mechanical_csv_to_field import mechanical_csv_to_field from .serializer import serializer from .field_to_csv import field_to_csv from .csv_to_field import csv_to_field from .txt_to_data_tree import txt_to_data_tree from .data_tree_to_txt import data_tree_to_txt +from .import_symbolic_workflow import import_symbolic_workflow from .deserializer import deserializer from .serializer_to_string import serializer_to_string from .string_deserializer import string_deserializer @@ -13,3 +15,7 @@ from .vtk_to_fields import vtk_to_fields from .migrate_file_to_vtk import migrate_file_to_vtk from .serialize_to_hdf5 import serialize_to_hdf5 +from .workflow_import_json import workflow_import_json +from .workflow_export_json import workflow_export_json +from .vtu_export import vtu_export +from .migrate_to_vtu import migrate_to_vtu diff --git a/ansys/dpf/core/operators/serialization/data_tree_to_json.py b/ansys/dpf/core/operators/serialization/data_tree_to_json.py index 2edb3fcf0d2..4f691ce594c 100644 --- a/ansys/dpf/core/operators/serialization/data_tree_to_json.py +++ b/ansys/dpf/core/operators/serialization/data_tree_to_json.py @@ -12,7 +12,7 @@ class data_tree_to_json(Operator): - """Write a json file or string from a DataTree + """Writes a json file or string from a DataTree Parameters ---------- @@ -54,7 +54,7 @@ def __init__(self, data_tree=None, path=None, config=None, server=None): @staticmethod def _spec(): - description = """Write a json file or string from a DataTree""" + description = """Writes a json file or string from a DataTree""" spec = Specification( description=description, map_input_pin_spec={ diff --git a/ansys/dpf/core/operators/serialization/data_tree_to_txt.py b/ansys/dpf/core/operators/serialization/data_tree_to_txt.py index 6c2fd8ab16e..87da19ad4f1 100644 --- a/ansys/dpf/core/operators/serialization/data_tree_to_txt.py +++ b/ansys/dpf/core/operators/serialization/data_tree_to_txt.py @@ -12,7 +12,7 @@ class data_tree_to_txt(Operator): - """Write a txt file or string from a DataTree + """Writes a txt file or string from a DataTree Parameters ---------- @@ -54,7 +54,7 @@ def __init__(self, data_tree=None, path=None, config=None, server=None): @staticmethod def _spec(): - description = """Write a txt file or string from a DataTree""" + description = """Writes a txt file or string from a DataTree""" spec = Specification( description=description, map_input_pin_spec={ diff --git a/ansys/dpf/core/operators/serialization/export_symbolic_workflow.py b/ansys/dpf/core/operators/serialization/export_symbolic_workflow.py new file mode 100644 index 00000000000..df54298ba00 --- /dev/null +++ b/ansys/dpf/core/operators/serialization/export_symbolic_workflow.py @@ -0,0 +1,251 @@ +""" +export_symbolic_workflow +======================== +Autogenerated DPF operator classes. 
+""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.outputs import _modify_output_spec_with_one_type +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class export_symbolic_workflow(Operator): + """Transforms a Workflow into a symbolic Workflow and writes it to a file + (if a path is set in input) or string + + Parameters + ---------- + workflow : Workflow + path : str, optional + format : int, optional + 0 is ascii format and 1 is binary, default is + 0. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.serialization.export_symbolic_workflow() + + >>> # Make input connections + >>> my_workflow = dpf.Workflow() + >>> op.inputs.workflow.connect(my_workflow) + >>> my_path = str() + >>> op.inputs.path.connect(my_path) + >>> my_format = int() + >>> op.inputs.format.connect(my_format) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.serialization.export_symbolic_workflow( + ... workflow=my_workflow, + ... path=my_path, + ... format=my_format, + ... ) + + >>> # Get output data + >>> result_data_sources = op.outputs.data_sources() + """ + + def __init__(self, workflow=None, path=None, format=None, config=None, server=None): + super().__init__(name="export_symbolic_workflow", config=config, server=server) + self._inputs = InputsExportSymbolicWorkflow(self) + self._outputs = OutputsExportSymbolicWorkflow(self) + if workflow is not None: + self.inputs.workflow.connect(workflow) + if path is not None: + self.inputs.path.connect(path) + if format is not None: + self.inputs.format.connect(format) + + @staticmethod + def _spec(): + description = """Transforms a Workflow into a symbolic Workflow and writes it to a file + (if a path is set in input) or string""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="workflow", + type_names=["workflow"], + optional=False, + document="""""", + ), + 1: PinSpecification( + name="path", + type_names=["string"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="format", + type_names=["int32"], + optional=True, + document="""0 is ascii format and 1 is binary, default is + 0.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="data_sources", + type_names=["data_sources", "string"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the global server. 
+ """ + return Operator.default_config(name="export_symbolic_workflow", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsExportSymbolicWorkflow + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsExportSymbolicWorkflow + """ + return super().outputs + + +class InputsExportSymbolicWorkflow(_Inputs): + """Intermediate class used to connect user inputs to + export_symbolic_workflow operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.export_symbolic_workflow() + >>> my_workflow = dpf.Workflow() + >>> op.inputs.workflow.connect(my_workflow) + >>> my_path = str() + >>> op.inputs.path.connect(my_path) + >>> my_format = int() + >>> op.inputs.format.connect(my_format) + """ + + def __init__(self, op: Operator): + super().__init__(export_symbolic_workflow._spec().inputs, op) + self._workflow = Input(export_symbolic_workflow._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._workflow) + self._path = Input(export_symbolic_workflow._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._path) + self._format = Input(export_symbolic_workflow._spec().input_pin(2), 2, op, -1) + self._inputs.append(self._format) + + @property + def workflow(self): + """Allows to connect workflow input to the operator. + + Parameters + ---------- + my_workflow : Workflow + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.export_symbolic_workflow() + >>> op.inputs.workflow.connect(my_workflow) + >>> # or + >>> op.inputs.workflow(my_workflow) + """ + return self._workflow + + @property + def path(self): + """Allows to connect path input to the operator. + + Parameters + ---------- + my_path : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.export_symbolic_workflow() + >>> op.inputs.path.connect(my_path) + >>> # or + >>> op.inputs.path(my_path) + """ + return self._path + + @property + def format(self): + """Allows to connect format input to the operator. + + 0 is ascii format and 1 is binary, default is + 0. + + Parameters + ---------- + my_format : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.export_symbolic_workflow() + >>> op.inputs.format.connect(my_format) + >>> # or + >>> op.inputs.format(my_format) + """ + return self._format + + +class OutputsExportSymbolicWorkflow(_Outputs): + """Intermediate class used to get outputs from + export_symbolic_workflow operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.export_symbolic_workflow() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_data_sources = op.outputs.data_sources() + """ + + def __init__(self, op: Operator): + super().__init__(export_symbolic_workflow._spec().outputs, op) + self.data_sources_as_data_sources = Output( + _modify_output_spec_with_one_type( + export_symbolic_workflow._spec().output_pin(0), "data_sources" + ), + 0, + op, + ) + self._outputs.append(self.data_sources_as_data_sources) + self.data_sources_as_string = Output( + _modify_output_spec_with_one_type( + export_symbolic_workflow._spec().output_pin(0), "string" + ), + 0, + op, + ) + self._outputs.append(self.data_sources_as_string) diff --git a/ansys/dpf/core/operators/serialization/import_symbolic_workflow.py b/ansys/dpf/core/operators/serialization/import_symbolic_workflow.py new file mode 100644 index 00000000000..9deebc0f3bf --- /dev/null +++ b/ansys/dpf/core/operators/serialization/import_symbolic_workflow.py @@ -0,0 +1,221 @@ +""" +import_symbolic_workflow +======================== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class import_symbolic_workflow(Operator): + """Reads a file or string holding a Symbolic Workflow and instantiates a + Workflow with its data. + + Parameters + ---------- + string_or_path : str or DataSources + format : int, optional + 0 is ascii format and 1 is binary, default is + 0. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.serialization.import_symbolic_workflow() + + >>> # Make input connections + >>> my_string_or_path = str() + >>> op.inputs.string_or_path.connect(my_string_or_path) + >>> my_format = int() + >>> op.inputs.format.connect(my_format) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.serialization.import_symbolic_workflow( + ... string_or_path=my_string_or_path, + ... format=my_format, + ... ) + + >>> # Get output data + >>> result_workflow = op.outputs.workflow() + """ + + def __init__(self, string_or_path=None, format=None, config=None, server=None): + super().__init__(name="import_symbolic_workflow", config=config, server=server) + self._inputs = InputsImportSymbolicWorkflow(self) + self._outputs = OutputsImportSymbolicWorkflow(self) + if string_or_path is not None: + self.inputs.string_or_path.connect(string_or_path) + if format is not None: + self.inputs.format.connect(format) + + @staticmethod + def _spec(): + description = """Reads a file or string holding a Symbolic Workflow and instantiates a + Workflow with its data.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="string_or_path", + type_names=["string", "data_sources"], + optional=False, + document="""""", + ), + 2: PinSpecification( + name="format", + type_names=["int32"], + optional=True, + document="""0 is ascii format and 1 is binary, default is + 0.""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="workflow", + type_names=["workflow"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the global server. + """ + return Operator.default_config(name="import_symbolic_workflow", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsImportSymbolicWorkflow + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluating it + + Returns + -------- + outputs : OutputsImportSymbolicWorkflow + """ + return super().outputs + + +class InputsImportSymbolicWorkflow(_Inputs): + """Intermediate class used to connect user inputs to + import_symbolic_workflow operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.import_symbolic_workflow() + >>> my_string_or_path = str() + >>> op.inputs.string_or_path.connect(my_string_or_path) + >>> my_format = int() + >>> op.inputs.format.connect(my_format) + """ + + def __init__(self, op: Operator): + super().__init__(import_symbolic_workflow._spec().inputs, op) + self._string_or_path = Input( + import_symbolic_workflow._spec().input_pin(0), 0, op, -1 + ) + self._inputs.append(self._string_or_path) + self._format = Input(import_symbolic_workflow._spec().input_pin(2), 2, op, -1) + self._inputs.append(self._format) + + @property + def string_or_path(self): + """Allows to connect string_or_path input to the operator. + + Parameters + ---------- + my_string_or_path : str or DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.import_symbolic_workflow() + >>> op.inputs.string_or_path.connect(my_string_or_path) + >>> # or + >>> op.inputs.string_or_path(my_string_or_path) + """ + return self._string_or_path + + @property + def format(self): + """Allows to connect format input to the operator. + + 0 is ascii format and 1 is binary, default is + 0. + + Parameters + ---------- + my_format : int + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.import_symbolic_workflow() + >>> op.inputs.format.connect(my_format) + >>> # or + >>> op.inputs.format(my_format) + """ + return self._format + + +class OutputsImportSymbolicWorkflow(_Outputs): + """Intermediate class used to get outputs from + import_symbolic_workflow operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.import_symbolic_workflow() + >>> # Connect inputs : op.inputs. ... + >>> result_workflow = op.outputs.workflow() + """ + + def __init__(self, op: Operator): + super().__init__(import_symbolic_workflow._spec().outputs, op) + self._workflow = Output(import_symbolic_workflow._spec().output_pin(0), 0, op) + self._outputs.append(self._workflow) + + @property + def workflow(self): + """Allows to get workflow output of the operator + + Returns + ---------- + my_workflow : Workflow + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.import_symbolic_workflow() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_workflow = op.outputs.workflow() + """ # noqa: E501 + return self._workflow diff --git a/ansys/dpf/core/operators/serialization/migrate_to_vtu.py b/ansys/dpf/core/operators/serialization/migrate_to_vtu.py new file mode 100644 index 00000000000..b794ef9305c --- /dev/null +++ b/ansys/dpf/core/operators/serialization/migrate_to_vtu.py @@ -0,0 +1,485 @@ +""" +migrate_to_vtu +============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class migrate_to_vtu(Operator): + """Extracts all results from a data sources object and exports them into vtu + format. All the connected inputs are forwarded to the result + provider operators. + + Parameters + ---------- + time_scoping : Scoping, optional + Time sets to export, default is all + streams_container : StreamsContainer, optional + Result file container allowed to be kept open + to cache data + data_sources : DataSources + Result file path container, used if no + streams are set + directory : str + Directory path + base_name : str, optional + Vtu base file name, (default is file) + result1 : str, optional + If operator's names are connected to this + pin, only these results are exported + (else all available results are + exported) + result2 : str, optional + If operator's names are connected to this + pin, only these results are exported + (else all available results are + exported) + write_mode : str, optional + Available are rawbinarycompressed, rawbinary, + base64appended, base64inline, ascii, + default is (rawbinarycompressed) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.serialization.migrate_to_vtu() + + >>> # Make input connections + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_directory = str() + >>> op.inputs.directory.connect(my_directory) + >>> my_base_name = str() + >>> op.inputs.base_name.connect(my_base_name) + >>> my_result1 = str() + >>> op.inputs.result1.connect(my_result1) + >>> my_result2 = str() + >>> op.inputs.result2.connect(my_result2) + >>> my_write_mode = str() + >>> op.inputs.write_mode.connect(my_write_mode) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.serialization.migrate_to_vtu( + ... time_scoping=my_time_scoping, + ... streams_container=my_streams_container, + ... data_sources=my_data_sources, + ... directory=my_directory, + ... base_name=my_base_name, + ... result1=my_result1, + ... result2=my_result2, + ... write_mode=my_write_mode, + ... 
) + + >>> # Get output data + >>> result_path = op.outputs.path() + """ + + def __init__( + self, + time_scoping=None, + streams_container=None, + data_sources=None, + directory=None, + base_name=None, + result1=None, + result2=None, + write_mode=None, + config=None, + server=None, + ): + super().__init__(name="migrate_to_vtu", config=config, server=server) + self._inputs = InputsMigrateToVtu(self) + self._outputs = OutputsMigrateToVtu(self) + if time_scoping is not None: + self.inputs.time_scoping.connect(time_scoping) + if streams_container is not None: + self.inputs.streams_container.connect(streams_container) + if data_sources is not None: + self.inputs.data_sources.connect(data_sources) + if directory is not None: + self.inputs.directory.connect(directory) + if base_name is not None: + self.inputs.base_name.connect(base_name) + if result1 is not None: + self.inputs.result1.connect(result1) + if result2 is not None: + self.inputs.result2.connect(result2) + if write_mode is not None: + self.inputs.write_mode.connect(write_mode) + + @staticmethod + def _spec(): + description = """Extracts all results from a data sources object and exports them into vtu + format. All the connected inputs are forwarded to the + result provider operators.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="time_scoping", + type_names=["scoping", "vector"], + optional=True, + document="""Time sets to export, default is all""", + ), + 3: PinSpecification( + name="streams_container", + type_names=["streams_container"], + optional=True, + document="""Result file container allowed to be kept open + to cache data""", + ), + 4: PinSpecification( + name="data_sources", + type_names=["data_sources"], + optional=False, + document="""Result file path container, used if no + streams are set""", + ), + 20: PinSpecification( + name="directory", + type_names=["string"], + optional=False, + document="""Directory path""", + ), + 21: PinSpecification( + name="base_name", + type_names=["string"], + optional=True, + document="""Vtu base file name, (default is file)""", + ), + 30: PinSpecification( + name="result", + type_names=["string"], + optional=True, + document="""If operator's names are connected to this + pin, only these results are exported + (else all available results are + exported)""", + ), + 31: PinSpecification( + name="result", + type_names=["string"], + optional=True, + document="""If operator's names are connected to this + pin, only these results are exported + (else all available results are + exported)""", + ), + 100: PinSpecification( + name="write_mode", + type_names=["string"], + optional=True, + document="""Available are rawbinarycompressed, rawbinary, + base64appended, base64inline, ascii, + default is (rawbinarycompressed)""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="path", + type_names=["data_sources"], + optional=False, + document="""List of output vtu file path""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the global server. 
+ """ + return Operator.default_config(name="migrate_to_vtu", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsMigrateToVtu + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsMigrateToVtu + """ + return super().outputs + + +class InputsMigrateToVtu(_Inputs): + """Intermediate class used to connect user inputs to + migrate_to_vtu operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.migrate_to_vtu() + >>> my_time_scoping = dpf.Scoping() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> my_streams_container = dpf.StreamsContainer() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> my_data_sources = dpf.DataSources() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> my_directory = str() + >>> op.inputs.directory.connect(my_directory) + >>> my_base_name = str() + >>> op.inputs.base_name.connect(my_base_name) + >>> my_result1 = str() + >>> op.inputs.result1.connect(my_result1) + >>> my_result2 = str() + >>> op.inputs.result2.connect(my_result2) + >>> my_write_mode = str() + >>> op.inputs.write_mode.connect(my_write_mode) + """ + + def __init__(self, op: Operator): + super().__init__(migrate_to_vtu._spec().inputs, op) + self._time_scoping = Input(migrate_to_vtu._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._time_scoping) + self._streams_container = Input(migrate_to_vtu._spec().input_pin(3), 3, op, -1) + self._inputs.append(self._streams_container) + self._data_sources = Input(migrate_to_vtu._spec().input_pin(4), 4, op, -1) + self._inputs.append(self._data_sources) + self._directory = Input(migrate_to_vtu._spec().input_pin(20), 20, op, -1) + self._inputs.append(self._directory) + self._base_name = Input(migrate_to_vtu._spec().input_pin(21), 21, op, -1) + self._inputs.append(self._base_name) + self._result1 = Input(migrate_to_vtu._spec().input_pin(30), 30, op, 0) + self._inputs.append(self._result1) + self._result2 = Input(migrate_to_vtu._spec().input_pin(31), 31, op, 1) + self._inputs.append(self._result2) + self._write_mode = Input(migrate_to_vtu._spec().input_pin(100), 100, op, -1) + self._inputs.append(self._write_mode) + + @property + def time_scoping(self): + """Allows to connect time_scoping input to the operator. + + Time sets to export, default is all + + Parameters + ---------- + my_time_scoping : Scoping + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.migrate_to_vtu() + >>> op.inputs.time_scoping.connect(my_time_scoping) + >>> # or + >>> op.inputs.time_scoping(my_time_scoping) + """ + return self._time_scoping + + @property + def streams_container(self): + """Allows to connect streams_container input to the operator. + + Result file container allowed to be kept open + to cache data + + Parameters + ---------- + my_streams_container : StreamsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.migrate_to_vtu() + >>> op.inputs.streams_container.connect(my_streams_container) + >>> # or + >>> op.inputs.streams_container(my_streams_container) + """ + return self._streams_container + + @property + def data_sources(self): + """Allows to connect data_sources input to the operator. 
+ + Result file path container, used if no + streams are set + + Parameters + ---------- + my_data_sources : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.migrate_to_vtu() + >>> op.inputs.data_sources.connect(my_data_sources) + >>> # or + >>> op.inputs.data_sources(my_data_sources) + """ + return self._data_sources + + @property + def directory(self): + """Allows to connect directory input to the operator. + + Directory path + + Parameters + ---------- + my_directory : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.migrate_to_vtu() + >>> op.inputs.directory.connect(my_directory) + >>> # or + >>> op.inputs.directory(my_directory) + """ + return self._directory + + @property + def base_name(self): + """Allows to connect base_name input to the operator. + + Vtu base file name, (default is file) + + Parameters + ---------- + my_base_name : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.migrate_to_vtu() + >>> op.inputs.base_name.connect(my_base_name) + >>> # or + >>> op.inputs.base_name(my_base_name) + """ + return self._base_name + + @property + def result1(self): + """Allows to connect result1 input to the operator. + + If operator's names are connected to this + pin, only these results are exported + (else all available results are + exported) + + Parameters + ---------- + my_result1 : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.migrate_to_vtu() + >>> op.inputs.result1.connect(my_result1) + >>> # or + >>> op.inputs.result1(my_result1) + """ + return self._result1 + + @property + def result2(self): + """Allows to connect result2 input to the operator. + + If operator's names are connected to this + pin, only these results are exported + (else all available results are + exported) + + Parameters + ---------- + my_result2 : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.migrate_to_vtu() + >>> op.inputs.result2.connect(my_result2) + >>> # or + >>> op.inputs.result2(my_result2) + """ + return self._result2 + + @property + def write_mode(self): + """Allows to connect write_mode input to the operator. + + Available are rawbinarycompressed, rawbinary, + base64appended, base64inline, ascii, + default is (rawbinarycompressed) + + Parameters + ---------- + my_write_mode : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.migrate_to_vtu() + >>> op.inputs.write_mode.connect(my_write_mode) + >>> # or + >>> op.inputs.write_mode(my_write_mode) + """ + return self._write_mode + + +class OutputsMigrateToVtu(_Outputs): + """Intermediate class used to get outputs from + migrate_to_vtu operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.migrate_to_vtu() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_path = op.outputs.path() + """ + + def __init__(self, op: Operator): + super().__init__(migrate_to_vtu._spec().outputs, op) + self._path = Output(migrate_to_vtu._spec().output_pin(0), 0, op) + self._outputs.append(self._path) + + @property + def path(self): + """Allows to get path output of the operator + + Returns + ---------- + my_path : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.migrate_to_vtu() + >>> # Connect inputs : op.inputs. ... + >>> result_path = op.outputs.path() + """ # noqa: E501 + return self._path diff --git a/ansys/dpf/core/operators/serialization/vtu_export.py b/ansys/dpf/core/operators/serialization/vtu_export.py new file mode 100644 index 00000000000..3b4dba037ab --- /dev/null +++ b/ansys/dpf/core/operators/serialization/vtu_export.py @@ -0,0 +1,381 @@ +""" +vtu_export +========== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class vtu_export(Operator): + """Exports DPF data into vtu format. + + Parameters + ---------- + directory : str + Directory path + base_name : str, optional + Vtu base file name, (default is file) + mesh : MeshedRegion + Mesh + fields1 : Field or FieldsContainer + Fields (over time) to export + fields2 : Field or FieldsContainer + Fields (over time) to export + write_mode : str, optional + Available are rawbinarycompressed, rawbinary, + base64appended, base64inline, ascii, + default is (rawbinarycompressed) + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.serialization.vtu_export() + + >>> # Make input connections + >>> my_directory = str() + >>> op.inputs.directory.connect(my_directory) + >>> my_base_name = str() + >>> op.inputs.base_name.connect(my_base_name) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_fields1 = dpf.Field() + >>> op.inputs.fields1.connect(my_fields1) + >>> my_fields2 = dpf.Field() + >>> op.inputs.fields2.connect(my_fields2) + >>> my_write_mode = str() + >>> op.inputs.write_mode.connect(my_write_mode) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.serialization.vtu_export( + ... directory=my_directory, + ... base_name=my_base_name, + ... mesh=my_mesh, + ... fields1=my_fields1, + ... fields2=my_fields2, + ... write_mode=my_write_mode, + ... 
)
+
+    >>> # Get output data
+    >>> result_path = op.outputs.path()
+    """
+
+    def __init__(
+        self,
+        directory=None,
+        base_name=None,
+        mesh=None,
+        fields1=None,
+        fields2=None,
+        write_mode=None,
+        config=None,
+        server=None,
+    ):
+        super().__init__(name="vtu_export", config=config, server=server)
+        self._inputs = InputsVtuExport(self)
+        self._outputs = OutputsVtuExport(self)
+        if directory is not None:
+            self.inputs.directory.connect(directory)
+        if base_name is not None:
+            self.inputs.base_name.connect(base_name)
+        if mesh is not None:
+            self.inputs.mesh.connect(mesh)
+        if fields1 is not None:
+            self.inputs.fields1.connect(fields1)
+        if fields2 is not None:
+            self.inputs.fields2.connect(fields2)
+        if write_mode is not None:
+            self.inputs.write_mode.connect(write_mode)
+
+    @staticmethod
+    def _spec():
+        description = """Export DPF data into vtu format."""
+        spec = Specification(
+            description=description,
+            map_input_pin_spec={
+                0: PinSpecification(
+                    name="directory",
+                    type_names=["string"],
+                    optional=False,
+                    document="""Directory path""",
+                ),
+                1: PinSpecification(
+                    name="base_name",
+                    type_names=["string"],
+                    optional=True,
+                    document="""Vtu base file name, (default is file)""",
+                ),
+                2: PinSpecification(
+                    name="mesh",
+                    type_names=["abstract_meshed_region"],
+                    optional=False,
+                    document="""Mesh""",
+                ),
+                3: PinSpecification(
+                    name="fields",
+                    type_names=["field", "fields_container"],
+                    optional=False,
+                    document="""Fields (over time) to export""",
+                ),
+                4: PinSpecification(
+                    name="fields",
+                    type_names=["field", "fields_container"],
+                    optional=False,
+                    document="""Fields (over time) to export""",
+                ),
+                100: PinSpecification(
+                    name="write_mode",
+                    type_names=["string"],
+                    optional=True,
+                    document="""Available are rawbinarycompressed, rawbinary,
+        base64appended, base64inline, ascii,
+        default is (rawbinarycompressed)""",
+                ),
+            },
+            map_output_pin_spec={
+                0: PinSpecification(
+                    name="path",
+                    type_names=["data_sources"],
+                    optional=False,
+                    document="""List of output vtu file path""",
+                ),
+            },
+        )
+        return spec
+
+    @staticmethod
+    def default_config(server=None):
+        """Returns the default config of the operator.
+
+        This config can then be changed to the user needs and be used to
+        instantiate the operator. The Configuration allows to customize
+        how the operation will be processed by the operator.
+
+        Parameters
+        ----------
+        server : server.DPFServer, optional
+            Server with channel connected to the remote or local instance. When
+            ``None``, attempts to use the global server.
+        """
+        return Operator.default_config(name="vtu_export", server=server)
+
+    @property
+    def inputs(self):
+        """Enables to connect inputs to the operator
+
+        Returns
+        --------
+        inputs : InputsVtuExport
+        """
+        return super().inputs
+
+    @property
+    def outputs(self):
+        """Enables to get outputs of the operator by evaluating it
+
+        Returns
+        --------
+        outputs : OutputsVtuExport
+        """
+        return super().outputs
+
+
+class InputsVtuExport(_Inputs):
+    """Intermediate class used to connect user inputs to
+    vtu_export operator.
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.vtu_export() + >>> my_directory = str() + >>> op.inputs.directory.connect(my_directory) + >>> my_base_name = str() + >>> op.inputs.base_name.connect(my_base_name) + >>> my_mesh = dpf.MeshedRegion() + >>> op.inputs.mesh.connect(my_mesh) + >>> my_fields1 = dpf.Field() + >>> op.inputs.fields1.connect(my_fields1) + >>> my_fields2 = dpf.Field() + >>> op.inputs.fields2.connect(my_fields2) + >>> my_write_mode = str() + >>> op.inputs.write_mode.connect(my_write_mode) + """ + + def __init__(self, op: Operator): + super().__init__(vtu_export._spec().inputs, op) + self._directory = Input(vtu_export._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._directory) + self._base_name = Input(vtu_export._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._base_name) + self._mesh = Input(vtu_export._spec().input_pin(2), 2, op, -1) + self._inputs.append(self._mesh) + self._fields1 = Input(vtu_export._spec().input_pin(3), 3, op, 0) + self._inputs.append(self._fields1) + self._fields2 = Input(vtu_export._spec().input_pin(4), 4, op, 1) + self._inputs.append(self._fields2) + self._write_mode = Input(vtu_export._spec().input_pin(100), 100, op, -1) + self._inputs.append(self._write_mode) + + @property + def directory(self): + """Allows to connect directory input to the operator. + + Directory path + + Parameters + ---------- + my_directory : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.vtu_export() + >>> op.inputs.directory.connect(my_directory) + >>> # or + >>> op.inputs.directory(my_directory) + """ + return self._directory + + @property + def base_name(self): + """Allows to connect base_name input to the operator. + + Vtu base file name, (default is file) + + Parameters + ---------- + my_base_name : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.vtu_export() + >>> op.inputs.base_name.connect(my_base_name) + >>> # or + >>> op.inputs.base_name(my_base_name) + """ + return self._base_name + + @property + def mesh(self): + """Allows to connect mesh input to the operator. + + Mesh + + Parameters + ---------- + my_mesh : MeshedRegion + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.vtu_export() + >>> op.inputs.mesh.connect(my_mesh) + >>> # or + >>> op.inputs.mesh(my_mesh) + """ + return self._mesh + + @property + def fields1(self): + """Allows to connect fields1 input to the operator. + + Fields (over time) to export + + Parameters + ---------- + my_fields1 : Field or FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.vtu_export() + >>> op.inputs.fields1.connect(my_fields1) + >>> # or + >>> op.inputs.fields1(my_fields1) + """ + return self._fields1 + + @property + def fields2(self): + """Allows to connect fields2 input to the operator. + + Fields (over time) to export + + Parameters + ---------- + my_fields2 : Field or FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.vtu_export() + >>> op.inputs.fields2.connect(my_fields2) + >>> # or + >>> op.inputs.fields2(my_fields2) + """ + return self._fields2 + + @property + def write_mode(self): + """Allows to connect write_mode input to the operator. 
+ + Available are rawbinarycompressed, rawbinary, + base64appended, base64inline, ascii, + default is (rawbinarycompressed) + + Parameters + ---------- + my_write_mode : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.vtu_export() + >>> op.inputs.write_mode.connect(my_write_mode) + >>> # or + >>> op.inputs.write_mode(my_write_mode) + """ + return self._write_mode + + +class OutputsVtuExport(_Outputs): + """Intermediate class used to get outputs from + vtu_export operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.vtu_export() + >>> # Connect inputs : op.inputs. ... + >>> result_path = op.outputs.path() + """ + + def __init__(self, op: Operator): + super().__init__(vtu_export._spec().outputs, op) + self._path = Output(vtu_export._spec().output_pin(0), 0, op) + self._outputs.append(self._path) + + @property + def path(self): + """Allows to get path output of the operator + + Returns + ---------- + my_path : DataSources + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.vtu_export() + >>> # Connect inputs : op.inputs. ... + >>> result_path = op.outputs.path() + """ # noqa: E501 + return self._path diff --git a/ansys/dpf/core/operators/serialization/workflow_export_json.py b/ansys/dpf/core/operators/serialization/workflow_export_json.py new file mode 100644 index 00000000000..b2fd84fdfb4 --- /dev/null +++ b/ansys/dpf/core/operators/serialization/workflow_export_json.py @@ -0,0 +1,230 @@ +""" +workflow_export_json +==================== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.outputs import _modify_output_spec_with_one_type +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class workflow_export_json(Operator): + """Export a workflow in json format. + + Parameters + ---------- + workflow : Workflow + Workflow to serialize. + file_path : str, optional + File path to write results to. when given the + operator will return a data source to + the path, otherwise a json string + will be output. + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.serialization.workflow_export_json() + + >>> # Make input connections + >>> my_workflow = dpf.Workflow() + >>> op.inputs.workflow.connect(my_workflow) + >>> my_file_path = str() + >>> op.inputs.file_path.connect(my_file_path) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.serialization.workflow_export_json( + ... workflow=my_workflow, + ... file_path=my_file_path, + ... 
)
+
+    >>> # Get output data
+    >>> result_json_workflow = op.outputs.json_workflow()
+    """
+
+    def __init__(self, workflow=None, file_path=None, config=None, server=None):
+        super().__init__(
+            name="serialization::workflow_export_json", config=config, server=server
+        )
+        self._inputs = InputsWorkflowExportJson(self)
+        self._outputs = OutputsWorkflowExportJson(self)
+        if workflow is not None:
+            self.inputs.workflow.connect(workflow)
+        if file_path is not None:
+            self.inputs.file_path.connect(file_path)
+
+    @staticmethod
+    def _spec():
+        description = """Export a workflow in json format."""
+        spec = Specification(
+            description=description,
+            map_input_pin_spec={
+                0: PinSpecification(
+                    name="workflow",
+                    type_names=["workflow"],
+                    optional=False,
+                    document="""Workflow to serialize.""",
+                ),
+                1: PinSpecification(
+                    name="file_path",
+                    type_names=["string"],
+                    optional=True,
+                    document="""File path to write results to. when given the
+        operator will return a data source to
+        the path, otherwise a json string
+        will be output.""",
+                ),
+            },
+            map_output_pin_spec={
+                0: PinSpecification(
+                    name="json_workflow",
+                    type_names=["data_sources", "string"],
+                    optional=False,
+                    document="""Depending on the input of pin 1 the output
+        will either be a data source to a
+        json file or a json string.""",
+                ),
+            },
+        )
+        return spec
+
+    @staticmethod
+    def default_config(server=None):
+        """Returns the default config of the operator.
+
+        This config can then be changed to the user needs and be used to
+        instantiate the operator. The Configuration allows to customize
+        how the operation will be processed by the operator.
+
+        Parameters
+        ----------
+        server : server.DPFServer, optional
+            Server with channel connected to the remote or local instance. When
+            ``None``, attempts to use the global server.
+        """
+        return Operator.default_config(
+            name="serialization::workflow_export_json", server=server
+        )
+
+    @property
+    def inputs(self):
+        """Enables to connect inputs to the operator
+
+        Returns
+        --------
+        inputs : InputsWorkflowExportJson
+        """
+        return super().inputs
+
+    @property
+    def outputs(self):
+        """Enables to get outputs of the operator by evaluating it
+
+        Returns
+        --------
+        outputs : OutputsWorkflowExportJson
+        """
+        return super().outputs
+
+
+class InputsWorkflowExportJson(_Inputs):
+    """Intermediate class used to connect user inputs to
+    workflow_export_json operator.
+
+    Examples
+    --------
+    >>> from ansys.dpf import core as dpf
+    >>> op = dpf.operators.serialization.workflow_export_json()
+    >>> my_workflow = dpf.Workflow()
+    >>> op.inputs.workflow.connect(my_workflow)
+    >>> my_file_path = str()
+    >>> op.inputs.file_path.connect(my_file_path)
+    """
+
+    def __init__(self, op: Operator):
+        super().__init__(workflow_export_json._spec().inputs, op)
+        self._workflow = Input(workflow_export_json._spec().input_pin(0), 0, op, -1)
+        self._inputs.append(self._workflow)
+        self._file_path = Input(workflow_export_json._spec().input_pin(1), 1, op, -1)
+        self._inputs.append(self._file_path)
+
+    @property
+    def workflow(self):
+        """Allows to connect workflow input to the operator.
+
+        Workflow to serialize.
+
+        Parameters
+        ----------
+        my_workflow : Workflow
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.serialization.workflow_export_json()
+        >>> op.inputs.workflow.connect(my_workflow)
+        >>> # or
+        >>> op.inputs.workflow(my_workflow)
+        """
+        return self._workflow
+
+    @property
+    def file_path(self):
+        """Allows to connect file_path input to the operator.
+ + File path to write results to. when given the + operator will return a data source to + the path, otherwise a json string + will be output. + + Parameters + ---------- + my_file_path : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.workflow_export_json() + >>> op.inputs.file_path.connect(my_file_path) + >>> # or + >>> op.inputs.file_path(my_file_path) + """ + return self._file_path + + +class OutputsWorkflowExportJson(_Outputs): + """Intermediate class used to get outputs from + workflow_export_json operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.workflow_export_json() + >>> # Connect inputs : op.inputs. ... + >>> result_json_workflow = op.outputs.json_workflow() + """ + + def __init__(self, op: Operator): + super().__init__(workflow_export_json._spec().outputs, op) + self.json_workflow_as_data_sources = Output( + _modify_output_spec_with_one_type( + workflow_export_json._spec().output_pin(0), "data_sources" + ), + 0, + op, + ) + self._outputs.append(self.json_workflow_as_data_sources) + self.json_workflow_as_string = Output( + _modify_output_spec_with_one_type( + workflow_export_json._spec().output_pin(0), "string" + ), + 0, + op, + ) + self._outputs.append(self.json_workflow_as_string) diff --git a/ansys/dpf/core/operators/serialization/workflow_import_json.py b/ansys/dpf/core/operators/serialization/workflow_import_json.py new file mode 100644 index 00000000000..ff0ed3254c2 --- /dev/null +++ b/ansys/dpf/core/operators/serialization/workflow_import_json.py @@ -0,0 +1,189 @@ +""" +workflow_import_json +==================== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class workflow_import_json(Operator): + """Import a workflow in json format. + + Parameters + ---------- + json_workflow : str or DataSources + Input json data as either a data source or a + string + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.serialization.workflow_import_json() + + >>> # Make input connections + >>> my_json_workflow = str() + >>> op.inputs.json_workflow.connect(my_json_workflow) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.serialization.workflow_import_json( + ... json_workflow=my_json_workflow, + ... 
)
+
+    >>> # Get output data
+    >>> result_workflow = op.outputs.workflow()
+    """
+
+    def __init__(self, json_workflow=None, config=None, server=None):
+        super().__init__(
+            name="serialization::workflow_import_json", config=config, server=server
+        )
+        self._inputs = InputsWorkflowImportJson(self)
+        self._outputs = OutputsWorkflowImportJson(self)
+        if json_workflow is not None:
+            self.inputs.json_workflow.connect(json_workflow)
+
+    @staticmethod
+    def _spec():
+        description = """Import a workflow in json format."""
+        spec = Specification(
+            description=description,
+            map_input_pin_spec={
+                0: PinSpecification(
+                    name="json_workflow",
+                    type_names=["string", "data_sources"],
+                    optional=False,
+                    document="""Input json data as either a data source or a
+        string""",
+                ),
+            },
+            map_output_pin_spec={
+                0: PinSpecification(
+                    name="workflow",
+                    type_names=["workflow"],
+                    optional=False,
+                    document="""Instantiate workflow.""",
+                ),
+            },
+        )
+        return spec
+
+    @staticmethod
+    def default_config(server=None):
+        """Returns the default config of the operator.
+
+        This config can then be changed to the user needs and be used to
+        instantiate the operator. The Configuration allows to customize
+        how the operation will be processed by the operator.
+
+        Parameters
+        ----------
+        server : server.DPFServer, optional
+            Server with channel connected to the remote or local instance. When
+            ``None``, attempts to use the global server.
+        """
+        return Operator.default_config(
+            name="serialization::workflow_import_json", server=server
+        )
+
+    @property
+    def inputs(self):
+        """Enables to connect inputs to the operator
+
+        Returns
+        --------
+        inputs : InputsWorkflowImportJson
+        """
+        return super().inputs
+
+    @property
+    def outputs(self):
+        """Enables to get outputs of the operator by evaluating it
+
+        Returns
+        --------
+        outputs : OutputsWorkflowImportJson
+        """
+        return super().outputs
+
+
+class InputsWorkflowImportJson(_Inputs):
+    """Intermediate class used to connect user inputs to
+    workflow_import_json operator.
+
+    Examples
+    --------
+    >>> from ansys.dpf import core as dpf
+    >>> op = dpf.operators.serialization.workflow_import_json()
+    >>> my_json_workflow = str()
+    >>> op.inputs.json_workflow.connect(my_json_workflow)
+    """
+
+    def __init__(self, op: Operator):
+        super().__init__(workflow_import_json._spec().inputs, op)
+        self._json_workflow = Input(
+            workflow_import_json._spec().input_pin(0), 0, op, -1
+        )
+        self._inputs.append(self._json_workflow)
+
+    @property
+    def json_workflow(self):
+        """Allows to connect json_workflow input to the operator.
+
+        Input json data as either a data source or a
+        string
+
+        Parameters
+        ----------
+        my_json_workflow : str or DataSources
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.serialization.workflow_import_json()
+        >>> op.inputs.json_workflow.connect(my_json_workflow)
+        >>> # or
+        >>> op.inputs.json_workflow(my_json_workflow)
+        """
+        return self._json_workflow
+
+
+class OutputsWorkflowImportJson(_Outputs):
+    """Intermediate class used to get outputs from
+    workflow_import_json operator.
+
+    Examples
+    --------
+    >>> from ansys.dpf import core as dpf
+    >>> op = dpf.operators.serialization.workflow_import_json()
+    >>> # Connect inputs : op.inputs. ...
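+    >>> # Hedged sketch: json_workflow would typically carry the string
+    >>> # produced by serialization.workflow_export_json; the variable
+    >>> # below is a hypothetical stand-in for such an exported string.
+    >>> # op.inputs.json_workflow.connect(exported_json_string)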
+ >>> result_workflow = op.outputs.workflow() + """ + + def __init__(self, op: Operator): + super().__init__(workflow_import_json._spec().outputs, op) + self._workflow = Output(workflow_import_json._spec().output_pin(0), 0, op) + self._outputs.append(self._workflow) + + @property + def workflow(self): + """Allows to get workflow output of the operator + + Returns + ---------- + my_workflow : Workflow + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.serialization.workflow_import_json() + >>> # Connect inputs : op.inputs. ... + >>> result_workflow = op.outputs.workflow() + """ # noqa: E501 + return self._workflow diff --git a/ansys/dpf/core/operators/utility/__init__.py b/ansys/dpf/core/operators/utility/__init__.py index 5488f446b2c..d755e9e4ad1 100644 --- a/ansys/dpf/core/operators/utility/__init__.py +++ b/ansys/dpf/core/operators/utility/__init__.py @@ -14,10 +14,13 @@ from .forward import forward from .txt_file_to_dpf import txt_file_to_dpf from .bind_support_fc import bind_support_fc +from .assemble_scalars_to_vectors import assemble_scalars_to_vectors +from .assemble_scalars_to_matrices import assemble_scalars_to_matrices from .default_value import default_value from .extract_time_freq import extract_time_freq from .python_generator import python_generator from .make_overall import make_overall +from .merge_weighted_fields_containers import merge_weighted_fields_containers from .merge_fields_containers import merge_fields_containers from .merge_scopings import merge_scopings from .merge_materials import merge_materials @@ -27,7 +30,9 @@ from .remote_operator_instantiate import remote_operator_instantiate from .merge_time_freq_supports import merge_time_freq_supports from .merge_fields_by_label import merge_fields_by_label +from .overlap_fields import overlap_fields from .merge_meshes import merge_meshes +from .merge_weighted_fields import merge_weighted_fields from .merge_fields import merge_fields from .merge_supports import merge_supports from .merge_meshes_containers import merge_meshes_containers diff --git a/ansys/dpf/core/operators/utility/assemble_scalars_to_matrices.py b/ansys/dpf/core/operators/utility/assemble_scalars_to_matrices.py new file mode 100644 index 00000000000..eddafc4f9f1 --- /dev/null +++ b/ansys/dpf/core/operators/utility/assemble_scalars_to_matrices.py @@ -0,0 +1,468 @@ +""" +assemble_scalars_to_matrices +============================ +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class assemble_scalars_to_matrices(Operator): + """Take nine scalar fields and assemble them as a 3x3 matrix field. 
+ + Parameters + ---------- + xx : Field, optional + yy : Field, optional + zz : Field, optional + xy : Field, optional + yz : Field, optional + xz : Field, optional + yx : Field, optional + zy : Field, optional + zx : Field, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.assemble_scalars_to_matrices() + + >>> # Make input connections + >>> my_xx = dpf.Field() + >>> op.inputs.xx.connect(my_xx) + >>> my_yy = dpf.Field() + >>> op.inputs.yy.connect(my_yy) + >>> my_zz = dpf.Field() + >>> op.inputs.zz.connect(my_zz) + >>> my_xy = dpf.Field() + >>> op.inputs.xy.connect(my_xy) + >>> my_yz = dpf.Field() + >>> op.inputs.yz.connect(my_yz) + >>> my_xz = dpf.Field() + >>> op.inputs.xz.connect(my_xz) + >>> my_yx = dpf.Field() + >>> op.inputs.yx.connect(my_yx) + >>> my_zy = dpf.Field() + >>> op.inputs.zy.connect(my_zy) + >>> my_zx = dpf.Field() + >>> op.inputs.zx.connect(my_zx) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.assemble_scalars_to_matrices( + ... xx=my_xx, + ... yy=my_yy, + ... zz=my_zz, + ... xy=my_xy, + ... yz=my_yz, + ... xz=my_xz, + ... yx=my_yx, + ... zy=my_zy, + ... zx=my_zx, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__( + self, + xx=None, + yy=None, + zz=None, + xy=None, + yz=None, + xz=None, + yx=None, + zy=None, + zx=None, + config=None, + server=None, + ): + super().__init__( + name="assemble_scalars_to_matrices", config=config, server=server + ) + self._inputs = InputsAssembleScalarsToMatrices(self) + self._outputs = OutputsAssembleScalarsToMatrices(self) + if xx is not None: + self.inputs.xx.connect(xx) + if yy is not None: + self.inputs.yy.connect(yy) + if zz is not None: + self.inputs.zz.connect(zz) + if xy is not None: + self.inputs.xy.connect(xy) + if yz is not None: + self.inputs.yz.connect(yz) + if xz is not None: + self.inputs.xz.connect(xz) + if yx is not None: + self.inputs.yx.connect(yx) + if zy is not None: + self.inputs.zy.connect(zy) + if zx is not None: + self.inputs.zx.connect(zx) + + @staticmethod + def _spec(): + description = ( + """Take nine scalar fields and assemble them as a 3x3 matrix field.""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="xx", + type_names=["field"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="yy", + type_names=["field"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="zz", + type_names=["field"], + optional=True, + document="""""", + ), + 3: PinSpecification( + name="xy", + type_names=["field"], + optional=True, + document="""""", + ), + 4: PinSpecification( + name="yz", + type_names=["field"], + optional=True, + document="""""", + ), + 5: PinSpecification( + name="xz", + type_names=["field"], + optional=True, + document="""""", + ), + 6: PinSpecification( + name="yx", + type_names=["field"], + optional=True, + document="""""", + ), + 7: PinSpecification( + name="zy", + type_names=["field"], + optional=True, + document="""""", + ), + 8: PinSpecification( + name="zx", + type_names=["field"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. 
+
+        This config can then be changed to the user needs and be used to
+        instantiate the operator. The Configuration allows to customize
+        how the operation will be processed by the operator.
+
+        Parameters
+        ----------
+        server : server.DPFServer, optional
+            Server with channel connected to the remote or local instance. When
+            ``None``, attempts to use the global server.
+        """
+        return Operator.default_config(
+            name="assemble_scalars_to_matrices", server=server
+        )
+
+    @property
+    def inputs(self):
+        """Enables to connect inputs to the operator
+
+        Returns
+        --------
+        inputs : InputsAssembleScalarsToMatrices
+        """
+        return super().inputs
+
+    @property
+    def outputs(self):
+        """Enables to get outputs of the operator by evaluating it
+
+        Returns
+        --------
+        outputs : OutputsAssembleScalarsToMatrices
+        """
+        return super().outputs
+
+
+class InputsAssembleScalarsToMatrices(_Inputs):
+    """Intermediate class used to connect user inputs to
+    assemble_scalars_to_matrices operator.
+
+    Examples
+    --------
+    >>> from ansys.dpf import core as dpf
+    >>> op = dpf.operators.utility.assemble_scalars_to_matrices()
+    >>> my_xx = dpf.Field()
+    >>> op.inputs.xx.connect(my_xx)
+    >>> my_yy = dpf.Field()
+    >>> op.inputs.yy.connect(my_yy)
+    >>> my_zz = dpf.Field()
+    >>> op.inputs.zz.connect(my_zz)
+    >>> my_xy = dpf.Field()
+    >>> op.inputs.xy.connect(my_xy)
+    >>> my_yz = dpf.Field()
+    >>> op.inputs.yz.connect(my_yz)
+    >>> my_xz = dpf.Field()
+    >>> op.inputs.xz.connect(my_xz)
+    >>> my_yx = dpf.Field()
+    >>> op.inputs.yx.connect(my_yx)
+    >>> my_zy = dpf.Field()
+    >>> op.inputs.zy.connect(my_zy)
+    >>> my_zx = dpf.Field()
+    >>> op.inputs.zx.connect(my_zx)
+    """
+
+    def __init__(self, op: Operator):
+        super().__init__(assemble_scalars_to_matrices._spec().inputs, op)
+        self._xx = Input(assemble_scalars_to_matrices._spec().input_pin(0), 0, op, -1)
+        self._inputs.append(self._xx)
+        self._yy = Input(assemble_scalars_to_matrices._spec().input_pin(1), 1, op, -1)
+        self._inputs.append(self._yy)
+        self._zz = Input(assemble_scalars_to_matrices._spec().input_pin(2), 2, op, -1)
+        self._inputs.append(self._zz)
+        self._xy = Input(assemble_scalars_to_matrices._spec().input_pin(3), 3, op, -1)
+        self._inputs.append(self._xy)
+        self._yz = Input(assemble_scalars_to_matrices._spec().input_pin(4), 4, op, -1)
+        self._inputs.append(self._yz)
+        self._xz = Input(assemble_scalars_to_matrices._spec().input_pin(5), 5, op, -1)
+        self._inputs.append(self._xz)
+        self._yx = Input(assemble_scalars_to_matrices._spec().input_pin(6), 6, op, -1)
+        self._inputs.append(self._yx)
+        self._zy = Input(assemble_scalars_to_matrices._spec().input_pin(7), 7, op, -1)
+        self._inputs.append(self._zy)
+        self._zx = Input(assemble_scalars_to_matrices._spec().input_pin(8), 8, op, -1)
+        self._inputs.append(self._zx)
+
+    @property
+    def xx(self):
+        """Allows to connect xx input to the operator.
+
+        Parameters
+        ----------
+        my_xx : Field
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.utility.assemble_scalars_to_matrices()
+        >>> op.inputs.xx.connect(my_xx)
+        >>> # or
+        >>> op.inputs.xx(my_xx)
+        """
+        return self._xx
+
+    @property
+    def yy(self):
+        """Allows to connect yy input to the operator.
+ + Parameters + ---------- + my_yy : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.assemble_scalars_to_matrices() + >>> op.inputs.yy.connect(my_yy) + >>> # or + >>> op.inputs.yy(my_yy) + """ + return self._yy + + @property + def zz(self): + """Allows to connect zz input to the operator. + + Parameters + ---------- + my_zz : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.assemble_scalars_to_matrices() + >>> op.inputs.zz.connect(my_zz) + >>> # or + >>> op.inputs.zz(my_zz) + """ + return self._zz + + @property + def xy(self): + """Allows to connect xy input to the operator. + + Parameters + ---------- + my_xy : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.assemble_scalars_to_matrices() + >>> op.inputs.xy.connect(my_xy) + >>> # or + >>> op.inputs.xy(my_xy) + """ + return self._xy + + @property + def yz(self): + """Allows to connect yz input to the operator. + + Parameters + ---------- + my_yz : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.assemble_scalars_to_matrices() + >>> op.inputs.yz.connect(my_yz) + >>> # or + >>> op.inputs.yz(my_yz) + """ + return self._yz + + @property + def xz(self): + """Allows to connect xz input to the operator. + + Parameters + ---------- + my_xz : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.assemble_scalars_to_matrices() + >>> op.inputs.xz.connect(my_xz) + >>> # or + >>> op.inputs.xz(my_xz) + """ + return self._xz + + @property + def yx(self): + """Allows to connect yx input to the operator. + + Parameters + ---------- + my_yx : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.assemble_scalars_to_matrices() + >>> op.inputs.yx.connect(my_yx) + >>> # or + >>> op.inputs.yx(my_yx) + """ + return self._yx + + @property + def zy(self): + """Allows to connect zy input to the operator. + + Parameters + ---------- + my_zy : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.assemble_scalars_to_matrices() + >>> op.inputs.zy.connect(my_zy) + >>> # or + >>> op.inputs.zy(my_zy) + """ + return self._zy + + @property + def zx(self): + """Allows to connect zx input to the operator. + + Parameters + ---------- + my_zx : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.assemble_scalars_to_matrices() + >>> op.inputs.zx.connect(my_zx) + >>> # or + >>> op.inputs.zx(my_zx) + """ + return self._zx + + +class OutputsAssembleScalarsToMatrices(_Outputs): + """Intermediate class used to get outputs from + assemble_scalars_to_matrices operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.assemble_scalars_to_matrices() + >>> # Connect inputs : op.inputs. ... 
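+    >>> # Hedged sketch: each pin takes a scalar field over one shared
+    >>> # scoping, e.g. dpf.fields_factory.create_scalar_field(num_entities=1);
+    >>> # the assembled output then has 9 components (3x3) per entity.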
+ >>> result_field = op.outputs.field() + """ + + def __init__(self, op: Operator): + super().__init__(assemble_scalars_to_matrices._spec().outputs, op) + self._field = Output(assemble_scalars_to_matrices._spec().output_pin(0), 0, op) + self._outputs.append(self._field) + + @property + def field(self): + """Allows to get field output of the operator + + Returns + ---------- + my_field : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.assemble_scalars_to_matrices() + >>> # Connect inputs : op.inputs. ... + >>> result_field = op.outputs.field() + """ # noqa: E501 + return self._field diff --git a/ansys/dpf/core/operators/utility/assemble_scalars_to_vectors.py b/ansys/dpf/core/operators/utility/assemble_scalars_to_vectors.py new file mode 100644 index 00000000000..ca371885ab8 --- /dev/null +++ b/ansys/dpf/core/operators/utility/assemble_scalars_to_vectors.py @@ -0,0 +1,251 @@ +""" +assemble_scalars_to_vectors +=========================== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class assemble_scalars_to_vectors(Operator): + """Take three scalar fields and assemble them as a 3d vector field. + + Parameters + ---------- + x : Field, optional + y : Field, optional + z : Field, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.assemble_scalars_to_vectors() + + >>> # Make input connections + >>> my_x = dpf.Field() + >>> op.inputs.x.connect(my_x) + >>> my_y = dpf.Field() + >>> op.inputs.y.connect(my_y) + >>> my_z = dpf.Field() + >>> op.inputs.z.connect(my_z) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.assemble_scalars_to_vectors( + ... x=my_x, + ... y=my_y, + ... z=my_z, + ... ) + + >>> # Get output data + >>> result_field = op.outputs.field() + """ + + def __init__(self, x=None, y=None, z=None, config=None, server=None): + super().__init__( + name="assemble_scalars_to_vectors", config=config, server=server + ) + self._inputs = InputsAssembleScalarsToVectors(self) + self._outputs = OutputsAssembleScalarsToVectors(self) + if x is not None: + self.inputs.x.connect(x) + if y is not None: + self.inputs.y.connect(y) + if z is not None: + self.inputs.z.connect(z) + + @staticmethod + def _spec(): + description = ( + """Take three scalar fields and assemble them as a 3d vector field.""" + ) + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="x", + type_names=["field"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="y", + type_names=["field"], + optional=True, + document="""""", + ), + 2: PinSpecification( + name="z", + type_names=["field"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
+
+        Parameters
+        ----------
+        server : server.DPFServer, optional
+            Server with channel connected to the remote or local instance. When
+            ``None``, attempts to use the global server.
+        """
+        return Operator.default_config(
+            name="assemble_scalars_to_vectors", server=server
+        )
+
+    @property
+    def inputs(self):
+        """Enables to connect inputs to the operator
+
+        Returns
+        --------
+        inputs : InputsAssembleScalarsToVectors
+        """
+        return super().inputs
+
+    @property
+    def outputs(self):
+        """Enables to get outputs of the operator by evaluating it
+
+        Returns
+        --------
+        outputs : OutputsAssembleScalarsToVectors
+        """
+        return super().outputs
+
+
+class InputsAssembleScalarsToVectors(_Inputs):
+    """Intermediate class used to connect user inputs to
+    assemble_scalars_to_vectors operator.
+
+    Examples
+    --------
+    >>> from ansys.dpf import core as dpf
+    >>> op = dpf.operators.utility.assemble_scalars_to_vectors()
+    >>> my_x = dpf.Field()
+    >>> op.inputs.x.connect(my_x)
+    >>> my_y = dpf.Field()
+    >>> op.inputs.y.connect(my_y)
+    >>> my_z = dpf.Field()
+    >>> op.inputs.z.connect(my_z)
+    """
+
+    def __init__(self, op: Operator):
+        super().__init__(assemble_scalars_to_vectors._spec().inputs, op)
+        self._x = Input(assemble_scalars_to_vectors._spec().input_pin(0), 0, op, -1)
+        self._inputs.append(self._x)
+        self._y = Input(assemble_scalars_to_vectors._spec().input_pin(1), 1, op, -1)
+        self._inputs.append(self._y)
+        self._z = Input(assemble_scalars_to_vectors._spec().input_pin(2), 2, op, -1)
+        self._inputs.append(self._z)
+
+    @property
+    def x(self):
+        """Allows to connect x input to the operator.
+
+        Parameters
+        ----------
+        my_x : Field
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.utility.assemble_scalars_to_vectors()
+        >>> op.inputs.x.connect(my_x)
+        >>> # or
+        >>> op.inputs.x(my_x)
+        """
+        return self._x
+
+    @property
+    def y(self):
+        """Allows to connect y input to the operator.
+
+        Parameters
+        ----------
+        my_y : Field
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.utility.assemble_scalars_to_vectors()
+        >>> op.inputs.y.connect(my_y)
+        >>> # or
+        >>> op.inputs.y(my_y)
+        """
+        return self._y
+
+    @property
+    def z(self):
+        """Allows to connect z input to the operator.
+
+        Parameters
+        ----------
+        my_z : Field
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.utility.assemble_scalars_to_vectors()
+        >>> op.inputs.z.connect(my_z)
+        >>> # or
+        >>> op.inputs.z(my_z)
+        """
+        return self._z
+
+
+class OutputsAssembleScalarsToVectors(_Outputs):
+    """Intermediate class used to get outputs from
+    assemble_scalars_to_vectors operator.
+
+    Examples
+    --------
+    >>> from ansys.dpf import core as dpf
+    >>> op = dpf.operators.utility.assemble_scalars_to_vectors()
+    >>> # Connect inputs : op.inputs. ...
+    >>> result_field = op.outputs.field()
+    """
+
+    def __init__(self, op: Operator):
+        super().__init__(assemble_scalars_to_vectors._spec().outputs, op)
+        self._field = Output(assemble_scalars_to_vectors._spec().output_pin(0), 0, op)
+        self._outputs.append(self._field)
+
+    @property
+    def field(self):
+        """Allows to get field output of the operator
+
+        Returns
+        ----------
+        my_field : Field
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.utility.assemble_scalars_to_vectors()
+        >>> # Connect inputs : op.inputs. ...
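+        >>> # Hedged sketch: with scalar fields connected on x, y and z
+        >>> # above, result_field holds 3 components per entity (a 3d vector).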
+ >>> result_field = op.outputs.field() + """ # noqa: E501 + return self._field diff --git a/ansys/dpf/core/operators/utility/merge_fields_by_label.py b/ansys/dpf/core/operators/utility/merge_fields_by_label.py index 4fc3c6c1f98..4214c54c051 100644 --- a/ansys/dpf/core/operators/utility/merge_fields_by_label.py +++ b/ansys/dpf/core/operators/utility/merge_fields_by_label.py @@ -22,7 +22,7 @@ class merge_fields_by_label(Operator): merged_field_support : AbstractFieldSupport, optional The fieldscontainer's support that has already been merged. - sumMerge : bool, optional + sum_merge : bool, optional Default is false. if true redundant quantities are summed instead of being ignored. @@ -42,15 +42,15 @@ class merge_fields_by_label(Operator): >>> op.inputs.label.connect(my_label) >>> my_merged_field_support = dpf.AbstractFieldSupport() >>> op.inputs.merged_field_support.connect(my_merged_field_support) - >>> my_sumMerge = bool() - >>> op.inputs.sumMerge.connect(my_sumMerge) + >>> my_sum_merge = bool() + >>> op.inputs.sum_merge.connect(my_sum_merge) >>> # Instantiate operator and connect inputs in one line >>> op = dpf.operators.utility.merge_fields_by_label( ... fields_container=my_fields_container, ... label=my_label, ... merged_field_support=my_merged_field_support, - ... sumMerge=my_sumMerge, + ... sum_merge=my_sum_merge, ... ) >>> # Get output data @@ -63,7 +63,7 @@ def __init__( fields_container=None, label=None, merged_field_support=None, - sumMerge=None, + sum_merge=None, config=None, server=None, ): @@ -78,8 +78,8 @@ def __init__( self.inputs.label.connect(label) if merged_field_support is not None: self.inputs.merged_field_support.connect(merged_field_support) - if sumMerge is not None: - self.inputs.sumMerge.connect(sumMerge) + if sum_merge is not None: + self.inputs.sum_merge.connect(sum_merge) @staticmethod def _spec(): @@ -108,7 +108,7 @@ def _spec(): already been merged.""", ), 3: PinSpecification( - name="sumMerge", + name="sum_merge", type_names=["bool"], optional=True, document="""Default is false. if true redundant @@ -186,8 +186,8 @@ class InputsMergeFieldsByLabel(_Inputs): >>> op.inputs.label.connect(my_label) >>> my_merged_field_support = dpf.AbstractFieldSupport() >>> op.inputs.merged_field_support.connect(my_merged_field_support) - >>> my_sumMerge = bool() - >>> op.inputs.sumMerge.connect(my_sumMerge) + >>> my_sum_merge = bool() + >>> op.inputs.sum_merge.connect(my_sum_merge) """ def __init__(self, op: Operator): @@ -202,8 +202,8 @@ def __init__(self, op: Operator): merge_fields_by_label._spec().input_pin(2), 2, op, -1 ) self._inputs.append(self._merged_field_support) - self._sumMerge = Input(merge_fields_by_label._spec().input_pin(3), 3, op, -1) - self._inputs.append(self._sumMerge) + self._sum_merge = Input(merge_fields_by_label._spec().input_pin(3), 3, op, -1) + self._inputs.append(self._sum_merge) @property def fields_container(self): @@ -265,8 +265,8 @@ def merged_field_support(self): return self._merged_field_support @property - def sumMerge(self): - """Allows to connect sumMerge input to the operator. + def sum_merge(self): + """Allows to connect sum_merge input to the operator. Default is false. 
if true redundant quantities are summed instead of @@ -274,17 +274,17 @@ def sumMerge(self): Parameters ---------- - my_sumMerge : bool + my_sum_merge : bool Examples -------- >>> from ansys.dpf import core as dpf >>> op = dpf.operators.utility.merge_fields_by_label() - >>> op.inputs.sumMerge.connect(my_sumMerge) + >>> op.inputs.sum_merge.connect(my_sum_merge) >>> # or - >>> op.inputs.sumMerge(my_sumMerge) + >>> op.inputs.sum_merge(my_sum_merge) """ - return self._sumMerge + return self._sum_merge class OutputsMergeFieldsByLabel(_Outputs): diff --git a/ansys/dpf/core/operators/utility/merge_fields_containers.py b/ansys/dpf/core/operators/utility/merge_fields_containers.py index 702299b9cf3..99f047ab425 100644 --- a/ansys/dpf/core/operators/utility/merge_fields_containers.py +++ b/ansys/dpf/core/operators/utility/merge_fields_containers.py @@ -15,6 +15,10 @@ class merge_fields_containers(Operator): Parameters ---------- + sum_merge : bool, optional + Default is false. if true redundant + quantities are summed instead of + being ignored. merged_fields_support : AbstractFieldSupport, optional Already merged field support. merged_fields_containers_support : AbstractFieldSupport, optional @@ -35,6 +39,8 @@ class merge_fields_containers(Operator): >>> op = dpf.operators.utility.merge_fields_containers() >>> # Make input connections + >>> my_sum_merge = bool() + >>> op.inputs.sum_merge.connect(my_sum_merge) >>> my_merged_fields_support = dpf.AbstractFieldSupport() >>> op.inputs.merged_fields_support.connect(my_merged_fields_support) >>> my_merged_fields_containers_support = dpf.AbstractFieldSupport() @@ -46,6 +52,7 @@ class merge_fields_containers(Operator): >>> # Instantiate operator and connect inputs in one line >>> op = dpf.operators.utility.merge_fields_containers( + ... sum_merge=my_sum_merge, ... merged_fields_support=my_merged_fields_support, ... merged_fields_containers_support=my_merged_fields_containers_support, ... fields_containers1=my_fields_containers1, @@ -58,6 +65,7 @@ class merge_fields_containers(Operator): def __init__( self, + sum_merge=None, merged_fields_support=None, merged_fields_containers_support=None, fields_containers1=None, @@ -68,6 +76,8 @@ def __init__( super().__init__(name="merge::fields_container", config=config, server=server) self._inputs = InputsMergeFieldsContainers(self) self._outputs = OutputsMergeFieldsContainers(self) + if sum_merge is not None: + self.inputs.sum_merge.connect(sum_merge) if merged_fields_support is not None: self.inputs.merged_fields_support.connect(merged_fields_support) if merged_fields_containers_support is not None: @@ -87,6 +97,14 @@ def _spec(): spec = Specification( description=description, map_input_pin_spec={ + -3: PinSpecification( + name="sum_merge", + type_names=["bool"], + optional=True, + document="""Default is false. 
if true redundant + quantities are summed instead of + being ignored.""", + ), -2: PinSpecification( name="merged_fields_support", type_names=["abstract_field_support"], @@ -173,6 +191,8 @@ class InputsMergeFieldsContainers(_Inputs): -------- >>> from ansys.dpf import core as dpf >>> op = dpf.operators.utility.merge_fields_containers() + >>> my_sum_merge = bool() + >>> op.inputs.sum_merge.connect(my_sum_merge) >>> my_merged_fields_support = dpf.AbstractFieldSupport() >>> op.inputs.merged_fields_support.connect(my_merged_fields_support) >>> my_merged_fields_containers_support = dpf.AbstractFieldSupport() @@ -185,6 +205,10 @@ class InputsMergeFieldsContainers(_Inputs): def __init__(self, op: Operator): super().__init__(merge_fields_containers._spec().inputs, op) + self._sum_merge = Input( + merge_fields_containers._spec().input_pin(-3), -3, op, -1 + ) + self._inputs.append(self._sum_merge) self._merged_fields_support = Input( merge_fields_containers._spec().input_pin(-2), -2, op, -1 ) @@ -202,6 +226,28 @@ def __init__(self, op: Operator): ) self._inputs.append(self._fields_containers2) + @property + def sum_merge(self): + """Allows to connect sum_merge input to the operator. + + Default is false. if true redundant + quantities are summed instead of + being ignored. + + Parameters + ---------- + my_sum_merge : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_fields_containers() + >>> op.inputs.sum_merge.connect(my_sum_merge) + >>> # or + >>> op.inputs.sum_merge(my_sum_merge) + """ + return self._sum_merge + @property def merged_fields_support(self): """Allows to connect merged_fields_support input to the operator. diff --git a/ansys/dpf/core/operators/utility/merge_weighted_fields.py b/ansys/dpf/core/operators/utility/merge_weighted_fields.py new file mode 100644 index 00000000000..0e40dda781b --- /dev/null +++ b/ansys/dpf/core/operators/utility/merge_weighted_fields.py @@ -0,0 +1,401 @@ +""" +merge_weighted_fields +===================== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class merge_weighted_fields(Operator): + """Take a set of fields and assemble them in a unique one applying a + weight on the sum of the fields. + + Parameters + ---------- + sum_merge : bool, optional + Default is false. if true redundant + quantities are summed instead of + being ignored. + merged_support : AbstractFieldSupport, optional + Already merged field support. + fields1 : Field + A vector of fields to merge or fields from + pin 0 to ... + fields2 : Field + A vector of fields to merge or fields from + pin 0 to ... + weights1 : PropertyField + Weights to apply to each field from pin 1000 + to ... + weights2 : PropertyField + Weights to apply to each field from pin 1000 + to ... 
+ + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.merge_weighted_fields() + + >>> # Make input connections + >>> my_sum_merge = bool() + >>> op.inputs.sum_merge.connect(my_sum_merge) + >>> my_merged_support = dpf.AbstractFieldSupport() + >>> op.inputs.merged_support.connect(my_merged_support) + >>> my_fields1 = dpf.Field() + >>> op.inputs.fields1.connect(my_fields1) + >>> my_fields2 = dpf.Field() + >>> op.inputs.fields2.connect(my_fields2) + >>> my_weights1 = dpf.PropertyField() + >>> op.inputs.weights1.connect(my_weights1) + >>> my_weights2 = dpf.PropertyField() + >>> op.inputs.weights2.connect(my_weights2) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.merge_weighted_fields( + ... sum_merge=my_sum_merge, + ... merged_support=my_merged_support, + ... fields1=my_fields1, + ... fields2=my_fields2, + ... weights1=my_weights1, + ... weights2=my_weights2, + ... ) + + >>> # Get output data + >>> result_merged_field = op.outputs.merged_field() + """ + + def __init__( + self, + sum_merge=None, + merged_support=None, + fields1=None, + fields2=None, + weights1=None, + weights2=None, + config=None, + server=None, + ): + super().__init__(name="merge::weighted_field", config=config, server=server) + self._inputs = InputsMergeWeightedFields(self) + self._outputs = OutputsMergeWeightedFields(self) + if sum_merge is not None: + self.inputs.sum_merge.connect(sum_merge) + if merged_support is not None: + self.inputs.merged_support.connect(merged_support) + if fields1 is not None: + self.inputs.fields1.connect(fields1) + if fields2 is not None: + self.inputs.fields2.connect(fields2) + if weights1 is not None: + self.inputs.weights1.connect(weights1) + if weights2 is not None: + self.inputs.weights2.connect(weights2) + + @staticmethod + def _spec(): + description = """Take a set of fields and assemble them in a unique one applying a + weight on the sum of the fields.""" + spec = Specification( + description=description, + map_input_pin_spec={ + -2: PinSpecification( + name="sum_merge", + type_names=["bool"], + optional=True, + document="""Default is false. if true redundant + quantities are summed instead of + being ignored.""", + ), + -1: PinSpecification( + name="merged_support", + type_names=["abstract_field_support"], + optional=True, + document="""Already merged field support.""", + ), + 0: PinSpecification( + name="fields", + type_names=["field"], + optional=False, + document="""A vector of fields to merge or fields from + pin 0 to ...""", + ), + 1: PinSpecification( + name="fields", + type_names=["field"], + optional=False, + document="""A vector of fields to merge or fields from + pin 0 to ...""", + ), + 1000: PinSpecification( + name="weights", + type_names=["property_field"], + optional=False, + document="""Weights to apply to each field from pin 1000 + to ...""", + ), + 1001: PinSpecification( + name="weights", + type_names=["property_field"], + optional=False, + document="""Weights to apply to each field from pin 1000 + to ...""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="merged_field", + type_names=["field"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. 
The Configuration allows to customize
+        how the operation will be processed by the operator.
+
+        Parameters
+        ----------
+        server : server.DPFServer, optional
+            Server with channel connected to the remote or local instance. When
+            ``None``, attempts to use the global server.
+        """
+        return Operator.default_config(name="merge::weighted_field", server=server)
+
+    @property
+    def inputs(self):
+        """Enables to connect inputs to the operator
+
+        Returns
+        --------
+        inputs : InputsMergeWeightedFields
+        """
+        return super().inputs
+
+    @property
+    def outputs(self):
+        """Enables to get outputs of the operator by evaluating it
+
+        Returns
+        --------
+        outputs : OutputsMergeWeightedFields
+        """
+        return super().outputs
+
+
+class InputsMergeWeightedFields(_Inputs):
+    """Intermediate class used to connect user inputs to
+    merge_weighted_fields operator.
+
+    Examples
+    --------
+    >>> from ansys.dpf import core as dpf
+    >>> op = dpf.operators.utility.merge_weighted_fields()
+    >>> my_sum_merge = bool()
+    >>> op.inputs.sum_merge.connect(my_sum_merge)
+    >>> my_merged_support = dpf.AbstractFieldSupport()
+    >>> op.inputs.merged_support.connect(my_merged_support)
+    >>> my_fields1 = dpf.Field()
+    >>> op.inputs.fields1.connect(my_fields1)
+    >>> my_fields2 = dpf.Field()
+    >>> op.inputs.fields2.connect(my_fields2)
+    >>> my_weights1 = dpf.PropertyField()
+    >>> op.inputs.weights1.connect(my_weights1)
+    >>> my_weights2 = dpf.PropertyField()
+    >>> op.inputs.weights2.connect(my_weights2)
+    """
+
+    def __init__(self, op: Operator):
+        super().__init__(merge_weighted_fields._spec().inputs, op)
+        self._sum_merge = Input(merge_weighted_fields._spec().input_pin(-2), -2, op, -1)
+        self._inputs.append(self._sum_merge)
+        self._merged_support = Input(
+            merge_weighted_fields._spec().input_pin(-1), -1, op, -1
+        )
+        self._inputs.append(self._merged_support)
+        self._fields1 = Input(merge_weighted_fields._spec().input_pin(0), 0, op, 0)
+        self._inputs.append(self._fields1)
+        self._fields2 = Input(merge_weighted_fields._spec().input_pin(1), 1, op, 1)
+        self._inputs.append(self._fields2)
+        self._weights1 = Input(
+            merge_weighted_fields._spec().input_pin(1000), 1000, op, 0
+        )
+        self._inputs.append(self._weights1)
+        self._weights2 = Input(
+            merge_weighted_fields._spec().input_pin(1001), 1001, op, 1
+        )
+        self._inputs.append(self._weights2)
+
+    @property
+    def sum_merge(self):
+        """Allows to connect sum_merge input to the operator.
+
+        Default is false. if true redundant
+        quantities are summed instead of
+        being ignored.
+
+        Parameters
+        ----------
+        my_sum_merge : bool
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.utility.merge_weighted_fields()
+        >>> op.inputs.sum_merge.connect(my_sum_merge)
+        >>> # or
+        >>> op.inputs.sum_merge(my_sum_merge)
+        """
+        return self._sum_merge
+
+    @property
+    def merged_support(self):
+        """Allows to connect merged_support input to the operator.
+
+        Already merged field support.
+
+        Parameters
+        ----------
+        my_merged_support : AbstractFieldSupport
+
+        Examples
+        --------
+        >>> from ansys.dpf import core as dpf
+        >>> op = dpf.operators.utility.merge_weighted_fields()
+        >>> op.inputs.merged_support.connect(my_merged_support)
+        >>> # or
+        >>> op.inputs.merged_support(my_merged_support)
+        """
+        return self._merged_support
+
+    @property
+    def fields1(self):
+        """Allows to connect fields1 input to the operator.
+
+        A vector of fields to merge or fields from
+        pin 0 to ...
+ + Parameters + ---------- + my_fields1 : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_weighted_fields() + >>> op.inputs.fields1.connect(my_fields1) + >>> # or + >>> op.inputs.fields1(my_fields1) + """ + return self._fields1 + + @property + def fields2(self): + """Allows to connect fields2 input to the operator. + + A vector of fields to merge or fields from + pin 0 to ... + + Parameters + ---------- + my_fields2 : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_weighted_fields() + >>> op.inputs.fields2.connect(my_fields2) + >>> # or + >>> op.inputs.fields2(my_fields2) + """ + return self._fields2 + + @property + def weights1(self): + """Allows to connect weights1 input to the operator. + + Weights to apply to each field from pin 1000 + to ... + + Parameters + ---------- + my_weights1 : PropertyField + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_weighted_fields() + >>> op.inputs.weights1.connect(my_weights1) + >>> # or + >>> op.inputs.weights1(my_weights1) + """ + return self._weights1 + + @property + def weights2(self): + """Allows to connect weights2 input to the operator. + + Weights to apply to each field from pin 1000 + to ... + + Parameters + ---------- + my_weights2 : PropertyField + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_weighted_fields() + >>> op.inputs.weights2.connect(my_weights2) + >>> # or + >>> op.inputs.weights2(my_weights2) + """ + return self._weights2 + + +class OutputsMergeWeightedFields(_Outputs): + """Intermediate class used to get outputs from + merge_weighted_fields operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_weighted_fields() + >>> # Connect inputs : op.inputs. ... + >>> result_merged_field = op.outputs.merged_field() + """ + + def __init__(self, op: Operator): + super().__init__(merge_weighted_fields._spec().outputs, op) + self._merged_field = Output(merge_weighted_fields._spec().output_pin(0), 0, op) + self._outputs.append(self._merged_field) + + @property + def merged_field(self): + """Allows to get merged_field output of the operator + + Returns + ---------- + my_merged_field : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_weighted_fields() + >>> # Connect inputs : op.inputs. ... + >>> result_merged_field = op.outputs.merged_field() + """ # noqa: E501 + return self._merged_field diff --git a/ansys/dpf/core/operators/utility/merge_weighted_fields_containers.py b/ansys/dpf/core/operators/utility/merge_weighted_fields_containers.py new file mode 100644 index 00000000000..9e5a22efcd8 --- /dev/null +++ b/ansys/dpf/core/operators/utility/merge_weighted_fields_containers.py @@ -0,0 +1,458 @@ +""" +merge_weighted_fields_containers +================================ +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class merge_weighted_fields_containers(Operator): + """Take a set of fields containers and assemble them in a unique one + applying a weight on the sum of the fields. 
+ + Parameters + ---------- + sum_merge : bool, optional + Default is false. if true redundant + quantities are summed instead of + being ignored. + merged_fields_support : AbstractFieldSupport, optional + Already merged field support. + merged_fields_containers_support : AbstractFieldSupport, optional + Already merged fields containers support. + fields_containers1 : FieldsContainer + A vector of fields containers to merge or + fields containers from pin 0 to ... + fields_containers2 : FieldsContainer + A vector of fields containers to merge or + fields containers from pin 0 to ... + weights1 : PropertyField + Weights to apply to each field from pin 1000 + to ... + weights2 : PropertyField + Weights to apply to each field from pin 1000 + to ... + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.merge_weighted_fields_containers() + + >>> # Make input connections + >>> my_sum_merge = bool() + >>> op.inputs.sum_merge.connect(my_sum_merge) + >>> my_merged_fields_support = dpf.AbstractFieldSupport() + >>> op.inputs.merged_fields_support.connect(my_merged_fields_support) + >>> my_merged_fields_containers_support = dpf.AbstractFieldSupport() + >>> op.inputs.merged_fields_containers_support.connect(my_merged_fields_containers_support) + >>> my_fields_containers1 = dpf.FieldsContainer() + >>> op.inputs.fields_containers1.connect(my_fields_containers1) + >>> my_fields_containers2 = dpf.FieldsContainer() + >>> op.inputs.fields_containers2.connect(my_fields_containers2) + >>> my_weights1 = dpf.PropertyField() + >>> op.inputs.weights1.connect(my_weights1) + >>> my_weights2 = dpf.PropertyField() + >>> op.inputs.weights2.connect(my_weights2) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.merge_weighted_fields_containers( + ... sum_merge=my_sum_merge, + ... merged_fields_support=my_merged_fields_support, + ... merged_fields_containers_support=my_merged_fields_containers_support, + ... fields_containers1=my_fields_containers1, + ... fields_containers2=my_fields_containers2, + ... weights1=my_weights1, + ... weights2=my_weights2, + ... 
) + + >>> # Get output data + >>> result_merged_fields_container = op.outputs.merged_fields_container() + """ + + def __init__( + self, + sum_merge=None, + merged_fields_support=None, + merged_fields_containers_support=None, + fields_containers1=None, + fields_containers2=None, + weights1=None, + weights2=None, + config=None, + server=None, + ): + super().__init__( + name="merge::weighted_fields_container", config=config, server=server + ) + self._inputs = InputsMergeWeightedFieldsContainers(self) + self._outputs = OutputsMergeWeightedFieldsContainers(self) + if sum_merge is not None: + self.inputs.sum_merge.connect(sum_merge) + if merged_fields_support is not None: + self.inputs.merged_fields_support.connect(merged_fields_support) + if merged_fields_containers_support is not None: + self.inputs.merged_fields_containers_support.connect( + merged_fields_containers_support + ) + if fields_containers1 is not None: + self.inputs.fields_containers1.connect(fields_containers1) + if fields_containers2 is not None: + self.inputs.fields_containers2.connect(fields_containers2) + if weights1 is not None: + self.inputs.weights1.connect(weights1) + if weights2 is not None: + self.inputs.weights2.connect(weights2) + + @staticmethod + def _spec(): + description = """Take a set of fields containers and assemble them in a unique one + applying a weight on the sum of the fields.""" + spec = Specification( + description=description, + map_input_pin_spec={ + -3: PinSpecification( + name="sum_merge", + type_names=["bool"], + optional=True, + document="""Default is false. if true redundant + quantities are summed instead of + being ignored.""", + ), + -2: PinSpecification( + name="merged_fields_support", + type_names=["abstract_field_support"], + optional=True, + document="""Already merged field support.""", + ), + -1: PinSpecification( + name="merged_fields_containers_support", + type_names=[ + "abstract_field_support", + "umap>", + ], + optional=True, + document="""Already merged fields containers support.""", + ), + 0: PinSpecification( + name="fields_containers", + type_names=["fields_container"], + optional=False, + document="""A vector of fields containers to merge or + fields containers from pin 0 to ...""", + ), + 1: PinSpecification( + name="fields_containers", + type_names=["fields_container"], + optional=False, + document="""A vector of fields containers to merge or + fields containers from pin 0 to ...""", + ), + 1000: PinSpecification( + name="weights", + type_names=["property_field"], + optional=False, + document="""Weights to apply to each field from pin 1000 + to ...""", + ), + 1001: PinSpecification( + name="weights", + type_names=["property_field"], + optional=False, + document="""Weights to apply to each field from pin 1000 + to ...""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="merged_fields_container", + type_names=["fields_container"], + optional=False, + document="""""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the global server. 
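In practice the pattern is to fetch this default configuration, inspect or adjust it, and hand it back at instantiation. A sketch, assuming a running DPF server (printing a Config is expected to list its options):

from ansys.dpf import core as dpf

conf = dpf.operators.utility.merge_weighted_fields_containers.default_config()
print(conf)  # inspect the available options before changing any of them
op = dpf.operators.utility.merge_weighted_fields_containers(config=conf)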
+ """ + return Operator.default_config( + name="merge::weighted_fields_container", server=server + ) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsMergeWeightedFieldsContainers + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsMergeWeightedFieldsContainers + """ + return super().outputs + + +class InputsMergeWeightedFieldsContainers(_Inputs): + """Intermediate class used to connect user inputs to + merge_weighted_fields_containers operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_weighted_fields_containers() + >>> my_sum_merge = bool() + >>> op.inputs.sum_merge.connect(my_sum_merge) + >>> my_merged_fields_support = dpf.AbstractFieldSupport() + >>> op.inputs.merged_fields_support.connect(my_merged_fields_support) + >>> my_merged_fields_containers_support = dpf.AbstractFieldSupport() + >>> op.inputs.merged_fields_containers_support.connect(my_merged_fields_containers_support) + >>> my_fields_containers1 = dpf.FieldsContainer() + >>> op.inputs.fields_containers1.connect(my_fields_containers1) + >>> my_fields_containers2 = dpf.FieldsContainer() + >>> op.inputs.fields_containers2.connect(my_fields_containers2) + >>> my_weights1 = dpf.PropertyField() + >>> op.inputs.weights1.connect(my_weights1) + >>> my_weights2 = dpf.PropertyField() + >>> op.inputs.weights2.connect(my_weights2) + """ + + def __init__(self, op: Operator): + super().__init__(merge_weighted_fields_containers._spec().inputs, op) + self._sum_merge = Input( + merge_weighted_fields_containers._spec().input_pin(-3), -3, op, -1 + ) + self._inputs.append(self._sum_merge) + self._merged_fields_support = Input( + merge_weighted_fields_containers._spec().input_pin(-2), -2, op, -1 + ) + self._inputs.append(self._merged_fields_support) + self._merged_fields_containers_support = Input( + merge_weighted_fields_containers._spec().input_pin(-1), -1, op, -1 + ) + self._inputs.append(self._merged_fields_containers_support) + self._fields_containers1 = Input( + merge_weighted_fields_containers._spec().input_pin(0), 0, op, 0 + ) + self._inputs.append(self._fields_containers1) + self._fields_containers2 = Input( + merge_weighted_fields_containers._spec().input_pin(1), 1, op, 1 + ) + self._inputs.append(self._fields_containers2) + self._weights1 = Input( + merge_weighted_fields_containers._spec().input_pin(1000), 1000, op, 0 + ) + self._inputs.append(self._weights1) + self._weights2 = Input( + merge_weighted_fields_containers._spec().input_pin(1001), 1001, op, 1 + ) + self._inputs.append(self._weights2) + + @property + def sum_merge(self): + """Allows to connect sum_merge input to the operator. + + Default is false. if true redundant + quantities are summed instead of + being ignored. + + Parameters + ---------- + my_sum_merge : bool + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_weighted_fields_containers() + >>> op.inputs.sum_merge.connect(my_sum_merge) + >>> # or + >>> op.inputs.sum_merge(my_sum_merge) + """ + return self._sum_merge + + @property + def merged_fields_support(self): + """Allows to connect merged_fields_support input to the operator. + + Already merged field support. 
+ + Parameters + ---------- + my_merged_fields_support : AbstractFieldSupport + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_weighted_fields_containers() + >>> op.inputs.merged_fields_support.connect(my_merged_fields_support) + >>> # or + >>> op.inputs.merged_fields_support(my_merged_fields_support) + """ + return self._merged_fields_support + + @property + def merged_fields_containers_support(self): + """Allows to connect merged_fields_containers_support input to the operator. + + Already merged fields containers support. + + Parameters + ---------- + my_merged_fields_containers_support : AbstractFieldSupport + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_weighted_fields_containers() + >>> op.inputs.merged_fields_containers_support.connect(my_merged_fields_containers_support) + >>> # or + >>> op.inputs.merged_fields_containers_support(my_merged_fields_containers_support) + """ + return self._merged_fields_containers_support + + @property + def fields_containers1(self): + """Allows to connect fields_containers1 input to the operator. + + A vector of fields containers to merge or + fields containers from pin 0 to ... + + Parameters + ---------- + my_fields_containers1 : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_weighted_fields_containers() + >>> op.inputs.fields_containers1.connect(my_fields_containers1) + >>> # or + >>> op.inputs.fields_containers1(my_fields_containers1) + """ + return self._fields_containers1 + + @property + def fields_containers2(self): + """Allows to connect fields_containers2 input to the operator. + + A vector of fields containers to merge or + fields containers from pin 0 to ... + + Parameters + ---------- + my_fields_containers2 : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_weighted_fields_containers() + >>> op.inputs.fields_containers2.connect(my_fields_containers2) + >>> # or + >>> op.inputs.fields_containers2(my_fields_containers2) + """ + return self._fields_containers2 + + @property + def weights1(self): + """Allows to connect weights1 input to the operator. + + Weights to apply to each field from pin 1000 + to ... + + Parameters + ---------- + my_weights1 : PropertyField + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_weighted_fields_containers() + >>> op.inputs.weights1.connect(my_weights1) + >>> # or + >>> op.inputs.weights1(my_weights1) + """ + return self._weights1 + + @property + def weights2(self): + """Allows to connect weights2 input to the operator. + + Weights to apply to each field from pin 1000 + to ... + + Parameters + ---------- + my_weights2 : PropertyField + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_weighted_fields_containers() + >>> op.inputs.weights2.connect(my_weights2) + >>> # or + >>> op.inputs.weights2(my_weights2) + """ + return self._weights2 + + +class OutputsMergeWeightedFieldsContainers(_Outputs): + """Intermediate class used to get outputs from + merge_weighted_fields_containers operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_weighted_fields_containers() + >>> # Connect inputs : op.inputs. ... 
+ >>> result_merged_fields_container = op.outputs.merged_fields_container() + """ + + def __init__(self, op: Operator): + super().__init__(merge_weighted_fields_containers._spec().outputs, op) + self._merged_fields_container = Output( + merge_weighted_fields_containers._spec().output_pin(0), 0, op + ) + self._outputs.append(self._merged_fields_container) + + @property + def merged_fields_container(self): + """Allows to get merged_fields_container output of the operator + + Returns + ---------- + my_merged_fields_container : FieldsContainer + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.merge_weighted_fields_containers() + >>> # Connect inputs : op.inputs. ... + >>> result_merged_fields_container = op.outputs.merged_fields_container() + """ # noqa: E501 + return self._merged_fields_container diff --git a/ansys/dpf/core/operators/utility/overlap_fields.py b/ansys/dpf/core/operators/utility/overlap_fields.py new file mode 100644 index 00000000000..1554c95077f --- /dev/null +++ b/ansys/dpf/core/operators/utility/overlap_fields.py @@ -0,0 +1,186 @@ +""" +overlap_fields +============== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class overlap_fields(Operator): + """Take two fields and superpose them, the overlapping field will + override values of base_field. + + Parameters + ---------- + base_field : Field, optional + overlapping_field : Field, optional + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.overlap_fields() + + >>> # Make input connections + >>> my_base_field = dpf.Field() + >>> op.inputs.base_field.connect(my_base_field) + >>> my_overlapping_field = dpf.Field() + >>> op.inputs.overlapping_field.connect(my_overlapping_field) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.overlap_fields( + ... base_field=my_base_field, + ... overlapping_field=my_overlapping_field, + ... ) + + """ + + def __init__( + self, base_field=None, overlapping_field=None, config=None, server=None + ): + super().__init__(name="overlap_fields", config=config, server=server) + self._inputs = InputsOverlapFields(self) + self._outputs = OutputsOverlapFields(self) + if base_field is not None: + self.inputs.base_field.connect(base_field) + if overlapping_field is not None: + self.inputs.overlapping_field.connect(overlapping_field) + + @staticmethod + def _spec(): + description = """Take two fields and superpose them, the overlapping field will + override values of base_field.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="base_field", + type_names=["field"], + optional=True, + document="""""", + ), + 1: PinSpecification( + name="overlapping_field", + type_names=["field"], + optional=True, + document="""""", + ), + }, + map_output_pin_spec={}, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. 
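Note that the spec above declares no output pins (map_output_pin_spec is empty), so the operator is evaluated for its effect on base_field rather than for a returned result. A sketch, where base and patch are assumed, pre-built dpf.Field objects:

from ansys.dpf import core as dpf

op = dpf.operators.utility.overlap_fields(
    base_field=base,          # assumed dpf.Field whose values get overridden
    overlapping_field=patch,  # assumed dpf.Field whose values take precedence
)
op.run()  # evaluate the operator; there is no output pin to fetch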
+ + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the global server. + """ + return Operator.default_config(name="overlap_fields", server=server) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsOverlapFields + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsOverlapFields + """ + return super().outputs + + +class InputsOverlapFields(_Inputs): + """Intermediate class used to connect user inputs to + overlap_fields operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.overlap_fields() + >>> my_base_field = dpf.Field() + >>> op.inputs.base_field.connect(my_base_field) + >>> my_overlapping_field = dpf.Field() + >>> op.inputs.overlapping_field.connect(my_overlapping_field) + """ + + def __init__(self, op: Operator): + super().__init__(overlap_fields._spec().inputs, op) + self._base_field = Input(overlap_fields._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._base_field) + self._overlapping_field = Input(overlap_fields._spec().input_pin(1), 1, op, -1) + self._inputs.append(self._overlapping_field) + + @property + def base_field(self): + """Allows to connect base_field input to the operator. + + Parameters + ---------- + my_base_field : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.overlap_fields() + >>> op.inputs.base_field.connect(my_base_field) + >>> # or + >>> op.inputs.base_field(my_base_field) + """ + return self._base_field + + @property + def overlapping_field(self): + """Allows to connect overlapping_field input to the operator. + + Parameters + ---------- + my_overlapping_field : Field + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.overlap_fields() + >>> op.inputs.overlapping_field.connect(my_overlapping_field) + >>> # or + >>> op.inputs.overlapping_field(my_overlapping_field) + """ + return self._overlapping_field + + +class OutputsOverlapFields(_Outputs): + """Intermediate class used to get outputs from + overlap_fields operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.overlap_fields() + >>> # Connect inputs : op.inputs. ... + """ + + def __init__(self, op: Operator): + super().__init__(overlap_fields._spec().outputs, op) diff --git a/ansys/dpf/core/operators/utility/python_script_exec.py b/ansys/dpf/core/operators/utility/python_script_exec.py new file mode 100644 index 00000000000..9bc8f372499 --- /dev/null +++ b/ansys/dpf/core/operators/utility/python_script_exec.py @@ -0,0 +1,184 @@ +""" +python_script_exec +================== +Autogenerated DPF operator classes. +""" +from warnings import warn +from ansys.dpf.core.dpf_operator import Operator +from ansys.dpf.core.inputs import Input, _Inputs +from ansys.dpf.core.outputs import Output, _Outputs +from ansys.dpf.core.operators.specification import PinSpecification, Specification + + +class python_script_exec(Operator): + """Execute python input script. 
+ + Parameters + ---------- + python_script : str + Input python script + + + Examples + -------- + >>> from ansys.dpf import core as dpf + + >>> # Instantiate operator + >>> op = dpf.operators.utility.python_script_exec() + + >>> # Make input connections + >>> my_python_script = str() + >>> op.inputs.python_script.connect(my_python_script) + + >>> # Instantiate operator and connect inputs in one line + >>> op = dpf.operators.utility.python_script_exec( + ... python_script=my_python_script, + ... ) + + >>> # Get output data + >>> result_output = op.outputs.output() + """ + + def __init__(self, python_script=None, config=None, server=None): + super().__init__( + name="utility::python_script_exec", config=config, server=server + ) + self._inputs = InputsPythonScriptExec(self) + self._outputs = OutputsPythonScriptExec(self) + if python_script is not None: + self.inputs.python_script.connect(python_script) + + @staticmethod + def _spec(): + description = """Execute python input script.""" + spec = Specification( + description=description, + map_input_pin_spec={ + 0: PinSpecification( + name="python_script", + type_names=["string"], + optional=False, + document="""Input python script""", + ), + }, + map_output_pin_spec={ + 0: PinSpecification( + name="output", + type_names=["any"], + optional=False, + document="""The output can be of any supported type""", + ), + }, + ) + return spec + + @staticmethod + def default_config(server=None): + """Returns the default config of the operator. + + This config can then be changed to the user needs and be used to + instantiate the operator. The Configuration allows to customize + how the operation will be processed by the operator. + + Parameters + ---------- + server : server.DPFServer, optional + Server with channel connected to the remote or local instance. When + ``None``, attempts to use the global server. + """ + return Operator.default_config( + name="utility::python_script_exec", server=server + ) + + @property + def inputs(self): + """Enables to connect inputs to the operator + + Returns + -------- + inputs : InputsPythonScriptExec + """ + return super().inputs + + @property + def outputs(self): + """Enables to get outputs of the operator by evaluationg it + + Returns + -------- + outputs : OutputsPythonScriptExec + """ + return super().outputs + + +class InputsPythonScriptExec(_Inputs): + """Intermediate class used to connect user inputs to + python_script_exec operator. + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.python_script_exec() + >>> my_python_script = str() + >>> op.inputs.python_script.connect(my_python_script) + """ + + def __init__(self, op: Operator): + super().__init__(python_script_exec._spec().inputs, op) + self._python_script = Input(python_script_exec._spec().input_pin(0), 0, op, -1) + self._inputs.append(self._python_script) + + @property + def python_script(self): + """Allows to connect python_script input to the operator. + + Input python script + + Parameters + ---------- + my_python_script : str + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.python_script_exec() + >>> op.inputs.python_script.connect(my_python_script) + >>> # or + >>> op.inputs.python_script(my_python_script) + """ + return self._python_script + + +class OutputsPythonScriptExec(_Outputs): + """Intermediate class used to get outputs from + python_script_exec operator. 
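A hedged sketch of running a one-line script server-side; the convention that the script binds a variable named output to fill pin 0 is an assumption here, not something the spec documents:

from ansys.dpf import core as dpf

op = dpf.operators.utility.python_script_exec()
op.inputs.python_script.connect("output = 6 * 7")  # assumed output-binding convention
result = op.outputs.output()  # pin 0 "can be of any supported type"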
+ + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.python_script_exec() + >>> # Connect inputs : op.inputs. ... + >>> result_output = op.outputs.output() + """ + + def __init__(self, op: Operator): + super().__init__(python_script_exec._spec().outputs, op) + self._output = Output(python_script_exec._spec().output_pin(0), 0, op) + self._outputs.append(self._output) + + @property + def output(self): + """Allows to get output output of the operator + + Returns + ---------- + my_output : Any + + Examples + -------- + >>> from ansys.dpf import core as dpf + >>> op = dpf.operators.utility.python_script_exec() + >>> # Connect inputs : op.inputs. ... + >>> result_output = op.outputs.output() + """ # noqa: E501 + return self._output diff --git a/ansys/dpf/core/server_types.py b/ansys/dpf/core/server_types.py index ea7ab55cb4a..9843cba842c 100644 --- a/ansys/dpf/core/server_types.py +++ b/ansys/dpf/core/server_types.py @@ -49,6 +49,8 @@ def _get_dll_path(name, ansys_path=None): if ansys_path is None: awp_root = "AWP_ROOT" + str(__ansys_version__) ANSYS_INSTALL = os.environ.get(awp_root, None) + if ANSYS_INSTALL is None: + ANSYS_INSTALL = core.misc.find_ansys() else: ANSYS_INSTALL = ansys_path if ANSYS_INSTALL is None: diff --git a/codacy.yml b/codacy.yml new file mode 100644 index 00000000000..b2ea28ea1e7 --- /dev/null +++ b/codacy.yml @@ -0,0 +1,3 @@ +--- +exclude_paths: + - "./ansys/dpf/core/operators/**/*" \ No newline at end of file diff --git a/docs/source/examples/06-distributed-post/00-distributed_total_disp.ipynb b/docs/source/examples/06-distributed-post/00-distributed_total_disp.ipynb deleted file mode 100644 index e9d2d394154..00000000000 --- a/docs/source/examples/06-distributed-post/00-distributed_total_disp.ipynb +++ /dev/null @@ -1,169 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "%matplotlib inline" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n\n# Post processing of displacement on distributed processes\n\nTo help understand this example the following diagram is provided. It shows\nthe operator chain used to compute the final result.\n\n\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Import dpf module and its examples files\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "from ansys.dpf import core as dpf\nfrom ansys.dpf.core import examples\nfrom ansys.dpf.core import operators as ops" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Configure the servers\nMake a list of ip addresses and port numbers on which dpf servers are\nstarted. 
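As an aside on the server_types.py hunk above: when the AWP_ROOT&lt;version&gt; environment variable is unset, the loader now falls back to core.misc.find_ansys(), which scans the standard install locations. A sketch of what the resolution amounts to; the 231 suffix mirrors the ANSYS_VERSION pinned by this branch:

import os
from ansys.dpf import core

install = os.environ.get("AWP_ROOT231")
if install is None:
    install = core.misc.find_ansys()  # may still be None if no install is found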
Operator instances will be created on each of those servers to\naddress each a different result file.\nIn this example, we will post process an analysis distributed in 2 files,\nwe will consequently require 2 remote processes.\nTo make this example easier, we will start local servers here,\nbut we could get connected to any existing servers on the network.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "remote_servers = [dpf.start_local_server(as_global=False), dpf.start_local_server(as_global=False)]\nips = [remote_server.ip for remote_server in remote_servers]\nports = [remote_server.port for remote_server in remote_servers]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Print the ips and ports\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "print(\"ips:\", ips)\nprint(\"ports:\", ports)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Here we show how we could send files in temporary directory if we were not\nin shared memory\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "files = examples.download_distributed_files()\nserver_file_paths = [dpf.upload_file_in_tmp_folder(files[0], server=remote_servers[0]),\n dpf.upload_file_in_tmp_folder(files[1], server=remote_servers[1])]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Create the operators on the servers\nOn each server we create two new operators for 'displacement' and 'norm'\ncomputations and define their data sources. The displacement operator\nreceives data from the data file in its respective server. 
And the norm\noperator, being chained to the displacement operator, receives input from the\noutput of this one.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "remote_operators = []\nfor i, server in enumerate(remote_servers):\n displacement = ops.result.displacement(server=server)\n norm = ops.math.norm_fc(displacement, server=server)\n remote_operators.append(norm)\n ds = dpf.DataSources(server_file_paths[i], server=server)\n displacement.inputs.data_sources(ds)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Create a merge_fields_containers operator able to merge the results\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "merge = ops.utility.merge_fields_containers()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Connect the operators together and get the output\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "for i, server in enumerate(remote_servers):\n merge.connect(i, remote_operators[i], 0)\n\nfc = merge.get_output(0, dpf.types.fields_container)\nprint(fc)\nprint(fc[0].min().data)\nprint(fc[0].max().data)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.13" - } - }, - "nbformat": 4, - "nbformat_minor": 0 -} \ No newline at end of file diff --git a/docs/source/examples/06-distributed-post/00-distributed_total_disp.py b/docs/source/examples/06-distributed-post/00-distributed_total_disp.py deleted file mode 100644 index 49f6055799d..00000000000 --- a/docs/source/examples/06-distributed-post/00-distributed_total_disp.py +++ /dev/null @@ -1,81 +0,0 @@ -""" -.. _ref_distributed_total_disp: - -Post processing of displacement on distributed processes -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To help understand this example the following diagram is provided. It shows -the operator chain used to compute the final result. - -.. image:: 00-operator-dep.svg - :align: center - :width: 400 -""" - -############################################################################### -# Import dpf module and its examples files - -from ansys.dpf import core as dpf -from ansys.dpf.core import examples -from ansys.dpf.core import operators as ops - -############################################################################### -# Configure the servers -# ~~~~~~~~~~~~~~~~~~~~~~ -# Make a list of ip addresses and port numbers on which dpf servers are -# started. Operator instances will be created on each of those servers to -# address each a different result file. -# In this example, we will post process an analysis distributed in 2 files, -# we will consequently require 2 remote processes. -# To make this example easier, we will start local servers here, -# but we could get connected to any existing servers on the network. 
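For the "existing servers" alternative this comment mentions, the connection sketch would look like the following; the addresses are placeholders, and 50054 is the default DPF port:

from ansys.dpf import core as dpf

# Attach to DPF servers already running on the network instead of spawning local ones.
remote_servers = [
    dpf.connect_to_server(ip="192.168.1.10", port=50054, as_global=False),
    dpf.connect_to_server(ip="192.168.1.11", port=50054, as_global=False),
]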
- -remote_servers = [dpf.start_local_server(as_global=False), dpf.start_local_server(as_global=False)] -ips = [remote_server.ip for remote_server in remote_servers] -ports = [remote_server.port for remote_server in remote_servers] - -############################################################################### -# Print the ips and ports -print("ips:", ips) -print("ports:", ports) - -############################################################################### -# Here we show how we could send files in temporary directory if we were not -# in shared memory -files = examples.download_distributed_files() -server_file_paths = [dpf.upload_file_in_tmp_folder(files[0], server=remote_servers[0]), - dpf.upload_file_in_tmp_folder(files[1], server=remote_servers[1])] - -############################################################################### -# Create the operators on the servers -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# On each server we create two new operators for 'displacement' and 'norm' -# computations and define their data sources. The displacement operator -# receives data from the data file in its respective server. And the norm -# operator, being chained to the displacement operator, receives input from the -# output of this one. -remote_operators = [] -for i, server in enumerate(remote_servers): - displacement = ops.result.displacement(server=server) - norm = ops.math.norm_fc(displacement, server=server) - remote_operators.append(norm) - ds = dpf.DataSources(server_file_paths[i], server=server) - displacement.inputs.data_sources(ds) - -############################################################################### -# Create a merge_fields_containers operator able to merge the results -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -merge = ops.utility.merge_fields_containers() - -############################################################################### -# Connect the operators together and get the output -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -for i, server in enumerate(remote_servers): - merge.connect(i, remote_operators[i], 0) - -fc = merge.get_output(0, dpf.types.fields_container) -print(fc) -print(fc[0].min().data) -print(fc[0].max().data) diff --git a/docs/source/examples/06-distributed-post/00-distributed_total_disp.py.md5 b/docs/source/examples/06-distributed-post/00-distributed_total_disp.py.md5 deleted file mode 100644 index 75df16d1fc1..00000000000 --- a/docs/source/examples/06-distributed-post/00-distributed_total_disp.py.md5 +++ /dev/null @@ -1 +0,0 @@ -c7db4566a3ddd7b357d5277698504001 \ No newline at end of file diff --git a/docs/source/examples/06-distributed-post/00-distributed_total_disp.rst b/docs/source/examples/06-distributed-post/00-distributed_total_disp.rst deleted file mode 100644 index 326e07a8141..00000000000 --- a/docs/source/examples/06-distributed-post/00-distributed_total_disp.rst +++ /dev/null @@ -1,247 +0,0 @@ - -.. DO NOT EDIT. -.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. -.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: -.. "examples\06-distributed-post\00-distributed_total_disp.py" -.. LINE NUMBERS ARE GIVEN BELOW. - -.. only:: html - - .. note:: - :class: sphx-glr-download-link-note - - Click :ref:`here ` - to download the full example code - -.. rst-class:: sphx-glr-example-title - -.. _sphx_glr_examples_06-distributed-post_00-distributed_total_disp.py: - - -.. 
_ref_distributed_total_disp: - -Post processing of displacement on distributed processes -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To help understand this example the following diagram is provided. It shows -the operator chain used to compute the final result. - -.. image:: 00-operator-dep.svg - :align: center - :width: 400 - -.. GENERATED FROM PYTHON SOURCE LINES 16-17 - -Import dpf module and its examples files - -.. GENERATED FROM PYTHON SOURCE LINES 17-22 - -.. code-block:: default - - - from ansys.dpf import core as dpf - from ansys.dpf.core import examples - from ansys.dpf.core import operators as ops - - - - - - - - -.. GENERATED FROM PYTHON SOURCE LINES 23-32 - -Configure the servers -~~~~~~~~~~~~~~~~~~~~~~ -Make a list of ip addresses and port numbers on which dpf servers are -started. Operator instances will be created on each of those servers to -address each a different result file. -In this example, we will post process an analysis distributed in 2 files, -we will consequently require 2 remote processes. -To make this example easier, we will start local servers here, -but we could get connected to any existing servers on the network. - -.. GENERATED FROM PYTHON SOURCE LINES 32-37 - -.. code-block:: default - - - remote_servers = [dpf.start_local_server(as_global=False), dpf.start_local_server(as_global=False)] - ips = [remote_server.ip for remote_server in remote_servers] - ports = [remote_server.port for remote_server in remote_servers] - - - - - - - - -.. GENERATED FROM PYTHON SOURCE LINES 38-39 - -Print the ips and ports - -.. GENERATED FROM PYTHON SOURCE LINES 39-42 - -.. code-block:: default - - print("ips:", ips) - print("ports:", ports) - - - - - -.. rst-class:: sphx-glr-script-out - - Out: - - .. code-block:: none - - ips: ['127.0.0.1', '127.0.0.1'] - ports: [50057, 50058] - - - - -.. GENERATED FROM PYTHON SOURCE LINES 43-45 - -Here we show how we could send files in temporary directory if we were not -in shared memory - -.. GENERATED FROM PYTHON SOURCE LINES 45-49 - -.. code-block:: default - - files = examples.download_distributed_files() - server_file_paths = [dpf.upload_file_in_tmp_folder(files[0], server=remote_servers[0]), - dpf.upload_file_in_tmp_folder(files[1], server=remote_servers[1])] - - - - - - - - -.. GENERATED FROM PYTHON SOURCE LINES 50-57 - -Create the operators on the servers -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -On each server we create two new operators for 'displacement' and 'norm' -computations and define their data sources. The displacement operator -receives data from the data file in its respective server. And the norm -operator, being chained to the displacement operator, receives input from the -output of this one. - -.. GENERATED FROM PYTHON SOURCE LINES 57-65 - -.. code-block:: default - - remote_operators = [] - for i, server in enumerate(remote_servers): - displacement = ops.result.displacement(server=server) - norm = ops.math.norm_fc(displacement, server=server) - remote_operators.append(norm) - ds = dpf.DataSources(server_file_paths[i], server=server) - displacement.inputs.data_sources(ds) - - - - - - - - -.. GENERATED FROM PYTHON SOURCE LINES 66-68 - -Create a merge_fields_containers operator able to merge the results -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -.. GENERATED FROM PYTHON SOURCE LINES 68-71 - -.. code-block:: default - - - merge = ops.utility.merge_fields_containers() - - - - - - - - -.. 
GENERATED FROM PYTHON SOURCE LINES 72-74 - -Connect the operators together and get the output -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -.. GENERATED FROM PYTHON SOURCE LINES 74-82 - -.. code-block:: default - - - for i, server in enumerate(remote_servers): - merge.connect(i, remote_operators[i], 0) - - fc = merge.get_output(0, dpf.types.fields_container) - print(fc) - print(fc[0].min().data) - print(fc[0].max().data) - - - - -.. rst-class:: sphx-glr-script-out - - Out: - - .. code-block:: none - - DPF Fields Container - with 1 field(s) - defined on labels: time - - with: - - field 0 {time: 1} with Nodal location, 1 components and 432 entities. - - [0.] - [10.03242272] - - - - - -.. rst-class:: sphx-glr-timing - - **Total running time of the script:** ( 0 minutes 0.711 seconds) - - -.. _sphx_glr_download_examples_06-distributed-post_00-distributed_total_disp.py: - - -.. only :: html - - .. container:: sphx-glr-footer - :class: sphx-glr-footer-example - - - - .. container:: sphx-glr-download sphx-glr-download-python - - :download:`Download Python source code: 00-distributed_total_disp.py <00-distributed_total_disp.py>` - - - - .. container:: sphx-glr-download sphx-glr-download-jupyter - - :download:`Download Jupyter notebook: 00-distributed_total_disp.ipynb <00-distributed_total_disp.ipynb>` - - -.. only:: html - - .. rst-class:: sphx-glr-signature - - `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/source/examples/06-distributed-post/00-distributed_total_disp_codeobj.pickle b/docs/source/examples/06-distributed-post/00-distributed_total_disp_codeobj.pickle deleted file mode 100644 index 0f2c875d000..00000000000 Binary files a/docs/source/examples/06-distributed-post/00-distributed_total_disp_codeobj.pickle and /dev/null differ diff --git a/docs/source/examples/06-distributed-post/00-operator-dep.dot b/docs/source/examples/06-distributed-post/00-operator-dep.dot deleted file mode 100644 index a99365e8260..00000000000 --- a/docs/source/examples/06-distributed-post/00-operator-dep.dot +++ /dev/null @@ -1,36 +0,0 @@ -digraph foo { - graph [pad="0", nodesep="0.3", ranksep="0.3"] - node [shape=box, style=filled, fillcolor="#ffcc00", margin="0"]; - rankdir=LR; - splines=line; - - disp01 [label="displacement"]; - disp02 [label="displacement"]; - norm01 [label="norm"]; - norm02 [label="norm"]; - - subgraph cluster_1 { - ds01 [label="data_src", shape=box, style=filled, fillcolor=cadetblue2]; - - ds01 -> disp01 [style=dashed]; - disp01 -> norm01; - - label="Server 1"; - style=filled; - fillcolor=lightgrey; - } - - subgraph cluster_2 { - ds02 [label="data_src", shape=box, style=filled, fillcolor=cadetblue2]; - - ds02 -> disp02 [style=dashed]; - disp02 -> norm02; - - label="Server 2"; - style=filled; - fillcolor=lightgrey; - } - - norm01 -> "merge"; - norm02 -> "merge"; -} diff --git a/docs/source/examples/06-distributed-post/00-operator-dep.svg b/docs/source/examples/06-distributed-post/00-operator-dep.svg deleted file mode 100644 index f05e0868fc2..00000000000 --- a/docs/source/examples/06-distributed-post/00-operator-dep.svg +++ /dev/null @@ -1,101 +0,0 @@ - - - - - - -foo - - -cluster_1 - -Server 1 - - -cluster_2 - -Server 2 - - - -disp01 - -displacement - - - -norm01 - -norm - - - -disp01->norm01 - - - - - -disp02 - -displacement - - - -norm02 - -norm - - - -disp02->norm02 - - - - - -merge - -merge - - - -norm01->merge - - - - - -norm02->merge - - - - - -ds01 - -data_src - - - -ds01->disp01 - - - - - -ds02 - -data_src - - - -ds02->disp02 - - - - - diff --git 
a/docs/source/examples/06-distributed-post/01-distributed_workflows_on_remote.ipynb b/docs/source/examples/06-distributed-post/01-distributed_workflows_on_remote.ipynb deleted file mode 100644 index a0ec25fd289..00000000000 --- a/docs/source/examples/06-distributed-post/01-distributed_workflows_on_remote.ipynb +++ /dev/null @@ -1,169 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "%matplotlib inline" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n\n# Create custom workflow on distributed processes\nThis example shows how distributed files can be read and post processed\non distributed processes. After remote post processing,\nresults are merged on the local process. In this example, different operator\nsequences are directly created on different servers. These operators are then\nconnected together without having to care that they are on remote processes.\n\n\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Import dpf module and its examples files\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "from ansys.dpf import core as dpf\nfrom ansys.dpf.core import examples\nfrom ansys.dpf.core import operators as ops" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Configure the servers\nTo make this example easier, we will start local servers here,\nbut we could get connected to any existing servers on the network.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "remote_servers = [dpf.start_local_server(as_global=False), dpf.start_local_server(as_global=False)]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Here we show how we could send files in temporary directory if we were not\nin shared memory\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "files = examples.download_distributed_files()\nserver_file_paths = [dpf.upload_file_in_tmp_folder(files[0], server=remote_servers[0]),\n dpf.upload_file_in_tmp_folder(files[1], server=remote_servers[1])]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "First operator chain.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "remote_operators = []\n\nstress1 = ops.result.stress(server=remote_servers[0])\nremote_operators.append(stress1)\nds = dpf.DataSources(server_file_paths[0], server=remote_servers[0])\nstress1.inputs.data_sources(ds)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Second operator chain.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "stress2 = ops.result.stress(server=remote_servers[1])\nmul = stress2 * 2.0\nremote_operators.append(mul)\nds = dpf.DataSources(server_file_paths[1], server=remote_servers[1])\nstress2.inputs.data_sources(ds)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Local merge operator.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "merge = 
ops.utility.merge_fields_containers()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Connect the operator chains together and get the output\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "nodal = ops.averaging.to_nodal_fc(merge)\n\nmerge.connect(0, remote_operators[0], 0)\nmerge.connect(1, remote_operators[1], 0)\n\nfc = nodal.get_output(0, dpf.types.fields_container)\nprint(fc[0])\nfc[0].meshed_region.plot(fc[0])" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.13" - } - }, - "nbformat": 4, - "nbformat_minor": 0 -} \ No newline at end of file diff --git a/docs/source/examples/06-distributed-post/01-distributed_workflows_on_remote.py b/docs/source/examples/06-distributed-post/01-distributed_workflows_on_remote.py deleted file mode 100644 index aef10f94549..00000000000 --- a/docs/source/examples/06-distributed-post/01-distributed_workflows_on_remote.py +++ /dev/null @@ -1,74 +0,0 @@ -""" -.. _ref_distributed_workflows_on_remote: - -Create custom workflow on distributed processes -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -This example shows how distributed files can be read and post processed -on distributed processes. After remote post processing, -results are merged on the local process. In this example, different operator -sequences are directly created on different servers. These operators are then -connected together without having to care that they are on remote processes. - -.. image:: 01-operator-dep.svg - :align: center - :width: 400 -""" -############################################################################### -# Import dpf module and its examples files - -from ansys.dpf import core as dpf -from ansys.dpf.core import examples -from ansys.dpf.core import operators as ops - -############################################################################### -# Configure the servers -# ~~~~~~~~~~~~~~~~~~~~~ -# To make this example easier, we will start local servers here, -# but we could get connected to any existing servers on the network. - -remote_servers = [dpf.start_local_server(as_global=False), dpf.start_local_server(as_global=False)] - -############################################################################### -# Here we show how we could send files in temporary directory if we were not -# in shared memory - -files = examples.download_distributed_files() -server_file_paths = [dpf.upload_file_in_tmp_folder(files[0], server=remote_servers[0]), - dpf.upload_file_in_tmp_folder(files[1], server=remote_servers[1])] - -############################################################################### -# First operator chain. - -remote_operators = [] - -stress1 = ops.result.stress(server=remote_servers[0]) -remote_operators.append(stress1) -ds = dpf.DataSources(server_file_paths[0], server=remote_servers[0]) -stress1.inputs.data_sources(ds) - -############################################################################### -# Second operator chain. 
- -stress2 = ops.result.stress(server=remote_servers[1]) -mul = stress2 * 2.0 -remote_operators.append(mul) -ds = dpf.DataSources(server_file_paths[1], server=remote_servers[1]) -stress2.inputs.data_sources(ds) - -############################################################################### -# Local merge operator. - -merge = ops.utility.merge_fields_containers() - -############################################################################### -# Connect the operator chains together and get the output -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -nodal = ops.averaging.to_nodal_fc(merge) - -merge.connect(0, remote_operators[0], 0) -merge.connect(1, remote_operators[1], 0) - -fc = nodal.get_output(0, dpf.types.fields_container) -print(fc[0]) -fc[0].meshed_region.plot(fc[0]) diff --git a/docs/source/examples/06-distributed-post/01-distributed_workflows_on_remote.py.md5 b/docs/source/examples/06-distributed-post/01-distributed_workflows_on_remote.py.md5 deleted file mode 100644 index a34a8b54e1d..00000000000 --- a/docs/source/examples/06-distributed-post/01-distributed_workflows_on_remote.py.md5 +++ /dev/null @@ -1 +0,0 @@ -fe17cd1215c2b1ca07b7355552feed3a \ No newline at end of file diff --git a/docs/source/examples/06-distributed-post/01-distributed_workflows_on_remote.rst b/docs/source/examples/06-distributed-post/01-distributed_workflows_on_remote.rst deleted file mode 100644 index f9fc58d87cb..00000000000 --- a/docs/source/examples/06-distributed-post/01-distributed_workflows_on_remote.rst +++ /dev/null @@ -1,234 +0,0 @@ - -.. DO NOT EDIT. -.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. -.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: -.. "examples\06-distributed-post\01-distributed_workflows_on_remote.py" -.. LINE NUMBERS ARE GIVEN BELOW. - -.. only:: html - - .. note:: - :class: sphx-glr-download-link-note - - Click :ref:`here ` - to download the full example code - -.. rst-class:: sphx-glr-example-title - -.. _sphx_glr_examples_06-distributed-post_01-distributed_workflows_on_remote.py: - - -.. _ref_distributed_workflows_on_remote: - -Create custom workflow on distributed processes -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -This example shows how distributed files can be read and post processed -on distributed processes. After remote post processing, -results are merged on the local process. In this example, different operator -sequences are directly created on different servers. These operators are then -connected together without having to care that they are on remote processes. - -.. image:: 01-operator-dep.svg - :align: center - :width: 400 - -.. GENERATED FROM PYTHON SOURCE LINES 17-18 - -Import dpf module and its examples files - -.. GENERATED FROM PYTHON SOURCE LINES 18-23 - -.. code-block:: default - - - from ansys.dpf import core as dpf - from ansys.dpf.core import examples - from ansys.dpf.core import operators as ops - - - - - - - - -.. GENERATED FROM PYTHON SOURCE LINES 24-28 - -Configure the servers -~~~~~~~~~~~~~~~~~~~~~ -To make this example easier, we will start local servers here, -but we could get connected to any existing servers on the network. - -.. GENERATED FROM PYTHON SOURCE LINES 28-31 - -.. code-block:: default - - - remote_servers = [dpf.start_local_server(as_global=False), dpf.start_local_server(as_global=False)] - - - - - - - - -.. GENERATED FROM PYTHON SOURCE LINES 32-34 - -Here we show how we could send files in temporary directory if we were not -in shared memory - -.. GENERATED FROM PYTHON SOURCE LINES 34-39 - -.. 
code-block:: default - - - files = examples.download_distributed_files() - server_file_paths = [dpf.upload_file_in_tmp_folder(files[0], server=remote_servers[0]), - dpf.upload_file_in_tmp_folder(files[1], server=remote_servers[1])] - - - - - - - - -.. GENERATED FROM PYTHON SOURCE LINES 40-41 - -First operator chain. - -.. GENERATED FROM PYTHON SOURCE LINES 41-49 - -.. code-block:: default - - - remote_operators = [] - - stress1 = ops.result.stress(server=remote_servers[0]) - remote_operators.append(stress1) - ds = dpf.DataSources(server_file_paths[0], server=remote_servers[0]) - stress1.inputs.data_sources(ds) - - - - - - - - -.. GENERATED FROM PYTHON SOURCE LINES 50-51 - -Second operator chain. - -.. GENERATED FROM PYTHON SOURCE LINES 51-58 - -.. code-block:: default - - - stress2 = ops.result.stress(server=remote_servers[1]) - mul = stress2 * 2.0 - remote_operators.append(mul) - ds = dpf.DataSources(server_file_paths[1], server=remote_servers[1]) - stress2.inputs.data_sources(ds) - - - - - - - - -.. GENERATED FROM PYTHON SOURCE LINES 59-60 - -Local merge operator. - -.. GENERATED FROM PYTHON SOURCE LINES 60-63 - -.. code-block:: default - - - merge = ops.utility.merge_fields_containers() - - - - - - - - -.. GENERATED FROM PYTHON SOURCE LINES 64-66 - -Connect the operator chains together and get the output -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -.. GENERATED FROM PYTHON SOURCE LINES 66-75 - -.. code-block:: default - - - nodal = ops.averaging.to_nodal_fc(merge) - - merge.connect(0, remote_operators[0], 0) - merge.connect(1, remote_operators[1], 0) - - fc = nodal.get_output(0, dpf.types.fields_container) - print(fc[0]) - fc[0].meshed_region.plot(fc[0]) - - - -.. image-sg:: /examples/06-distributed-post/images/sphx_glr_01-distributed_workflows_on_remote_001.png - :alt: 01 distributed workflows on remote - :srcset: /examples/06-distributed-post/images/sphx_glr_01-distributed_workflows_on_remote_001.png - :class: sphx-glr-single-img - - -.. rst-class:: sphx-glr-script-out - - Out: - - .. code-block:: none - - DPF stress_7491.964387Hz Field - Location: Nodal - Unit: Pa - 432 entities - Data:6 components and 432 elementary data - - - - - - -.. rst-class:: sphx-glr-timing - - **Total running time of the script:** ( 0 minutes 1.630 seconds) - - -.. _sphx_glr_download_examples_06-distributed-post_01-distributed_workflows_on_remote.py: - - -.. only :: html - - .. container:: sphx-glr-footer - :class: sphx-glr-footer-example - - - - .. container:: sphx-glr-download sphx-glr-download-python - - :download:`Download Python source code: 01-distributed_workflows_on_remote.py <01-distributed_workflows_on_remote.py>` - - - - .. container:: sphx-glr-download sphx-glr-download-jupyter - - :download:`Download Jupyter notebook: 01-distributed_workflows_on_remote.ipynb <01-distributed_workflows_on_remote.ipynb>` - - -.. only:: html - - .. 
rst-class:: sphx-glr-signature - - `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/source/examples/06-distributed-post/01-distributed_workflows_on_remote_codeobj.pickle b/docs/source/examples/06-distributed-post/01-distributed_workflows_on_remote_codeobj.pickle deleted file mode 100644 index 43a9c2e54cb..00000000000 Binary files a/docs/source/examples/06-distributed-post/01-distributed_workflows_on_remote_codeobj.pickle and /dev/null differ diff --git a/docs/source/examples/06-distributed-post/01-operator-dep.dot b/docs/source/examples/06-distributed-post/01-operator-dep.dot deleted file mode 100644 index c39ea927ffd..00000000000 --- a/docs/source/examples/06-distributed-post/01-operator-dep.dot +++ /dev/null @@ -1,30 +0,0 @@ -digraph foo { - graph [pad="0", nodesep="0.3", ranksep="0.3"] - node [shape=box, style=filled, fillcolor="#ffcc00", margin="0"]; - rankdir=LR; - splines=line; - - subgraph cluster_1 { - ds01 [label="data_src", shape=box, style=filled, fillcolor=cadetblue2]; - - ds01 -> stress1 [style=dashed]; - - label="Server 1"; - style=filled; - fillcolor=lightgrey; - } - - subgraph cluster_2 { - ds02 [label="data_src", shape=box, style=filled, fillcolor=cadetblue2]; - - ds02 -> stress2 [style=dashed]; - stress2 -> mul; - - label="Server 2"; - style=filled; - fillcolor=lightgrey; - } - - stress1 -> "merge"; - mul -> "merge"; -} diff --git a/docs/source/examples/06-distributed-post/01-operator-dep.svg b/docs/source/examples/06-distributed-post/01-operator-dep.svg deleted file mode 100644 index c371cd09e9c..00000000000 --- a/docs/source/examples/06-distributed-post/01-operator-dep.svg +++ /dev/null @@ -1,89 +0,0 @@ - - - - - - -foo - - -cluster_1 - -Server 1 - - -cluster_2 - -Server 2 - - - -ds01 - -data_src - - - -stress1 - -stress1 - - - -ds01->stress1 - - - - - -merge - -merge - - - -stress1->merge - - - - - -ds02 - -data_src - - - -stress2 - -stress2 - - - -ds02->stress2 - - - - - -mul - -mul - - - -stress2->mul - - - - - -mul->merge - - - - - diff --git a/docs/source/examples/06-distributed-post/02-distributed-msup_expansion.ipynb b/docs/source/examples/06-distributed-post/02-distributed-msup_expansion.ipynb deleted file mode 100644 index c2eb2b0bcd4..00000000000 --- a/docs/source/examples/06-distributed-post/02-distributed-msup_expansion.ipynb +++ /dev/null @@ -1,169 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "%matplotlib inline" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n\n# Distributed modal superposition\nThis example shows how distributed files can be read and expanded\non distributed processes. The modal basis (2 distributed files) is read\non 2 remote servers and the modal response reading and the expansion is\ndone on a third server.\n\nTo help understand this example the following diagram is provided. It shows\nthe operator chain used to compute the final result.\n\n\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Import dpf module and its examples files.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "from ansys.dpf import core as dpf\nfrom ansys.dpf.core import examples\nfrom ansys.dpf.core import operators as ops" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Configure the servers\nMake a list of ip addresses and port numbers on which dpf servers are\nstarted. 
Operator instances will be created on each of those servers to\naddress each a different result file.\nIn this example, we will post process an analysis distributed in 2 files,\nwe will consequently require 2 remote processes.\nTo make this example easier, we will start local servers here,\nbut we could get connected to any existing servers on the network.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "remote_servers = [dpf.start_local_server(as_global=False), dpf.start_local_server(as_global=False)]\nips = [remote_server.ip for remote_server in remote_servers]\nports = [remote_server.port for remote_server in remote_servers]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Print the ips and ports.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "print(\"ips:\", ips)\nprint(\"ports:\", ports)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Choose the file path.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "base_path = examples.distributed_msup_folder\nfiles = [base_path + r'/file0.mode', base_path + r'/file1.mode']\nfiles_aux = [base_path + r'/file0.rst', base_path + r'/file1.rst']" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Create the operators on the servers\nOn each server we create two new operators, one for 'displacement' computations\nand a 'mesh_provider' operator and then define their data sources. The displacement\nand mesh_provider operators receive data from their respective data files on each server.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "remote_displacement_operators = []\nremote_mesh_operators = []\nfor i, server in enumerate(remote_servers):\n displacement = ops.result.displacement(server=server)\n mesh = ops.mesh.mesh_provider(server=server)\n remote_displacement_operators.append(displacement)\n remote_mesh_operators.append(mesh)\n ds = dpf.DataSources(files[i], server=server)\n ds.add_file_path(files_aux[i])\n displacement.inputs.data_sources(ds)\n mesh.inputs.data_sources(ds)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Create a local operators chain for expansion\nIn the following series of operators we merge the modal basis, the meshes, read\nthe modal response and expand the modal response with the modal basis.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "merge_fields = ops.utility.merge_fields_containers()\nmerge_mesh = ops.utility.merge_meshes()\n\nds = dpf.DataSources(base_path + r'/file_load_1.rfrq')\nresponse = ops.result.displacement(data_sources=ds)\nresponse.inputs.mesh(merge_mesh.outputs.merges_mesh)\n\nexpansion = ops.math.modal_superposition(\n solution_in_modal_space=response,\n modal_basis=merge_fields\n)\ncomponent = ops.logic.component_selector_fc(expansion, 1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Connect the operator chains together and get the output\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "for i, server in 
enumerate(remote_servers):\n merge_fields.connect(i, remote_displacement_operators[i], 0)\n merge_mesh.connect(i, remote_mesh_operators[i], 0)\n\nfc = component.get_output(0, dpf.types.fields_container)\nmerged_mesh = merge_mesh.get_output(0, dpf.types.meshed_region)\n\nmerged_mesh.plot(fc.get_field_by_time_complex_ids(1, 0))\nmerged_mesh.plot(fc.get_field_by_time_complex_ids(10, 0))\nprint(fc)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.13" - } - }, - "nbformat": 4, - "nbformat_minor": 0 -} \ No newline at end of file diff --git a/docs/source/examples/06-distributed-post/02-distributed-msup_expansion.py b/docs/source/examples/06-distributed-post/02-distributed-msup_expansion.py deleted file mode 100644 index bbf4e0862dd..00000000000 --- a/docs/source/examples/06-distributed-post/02-distributed-msup_expansion.py +++ /dev/null @@ -1,102 +0,0 @@ -""" -.. _ref_distributed_msup: - -Distributed modal superposition -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -This example shows how distributed files can be read and expanded -on distributed processes. The modal basis (2 distributed files) is read -on 2 remote servers and the modal response reading and the expansion is -done on a third server. - -To help understand this example the following diagram is provided. It shows -the operator chain used to compute the final result. - -.. image:: 02-operator-dep.svg - :align: center - :width: 800 -""" - -############################################################################### -# Import dpf module and its examples files. - -from ansys.dpf import core as dpf -from ansys.dpf.core import examples -from ansys.dpf.core import operators as ops - -############################################################################### -# Configure the servers -# ~~~~~~~~~~~~~~~~~~~~~ -# Make a list of ip addresses and port numbers on which dpf servers are -# started. Operator instances will be created on each of those servers to -# address each a different result file. -# In this example, we will post process an analysis distributed in 2 files, -# we will consequently require 2 remote processes. -# To make this example easier, we will start local servers here, -# but we could get connected to any existing servers on the network. - -remote_servers = [dpf.start_local_server(as_global=False), dpf.start_local_server(as_global=False)] -ips = [remote_server.ip for remote_server in remote_servers] -ports = [remote_server.port for remote_server in remote_servers] - -############################################################################### -# Print the ips and ports. -print("ips:", ips) -print("ports:", ports) - -############################################################################### -# Choose the file path. - -base_path = examples.distributed_msup_folder -files = [base_path + r'/file0.mode', base_path + r'/file1.mode'] -files_aux = [base_path + r'/file0.rst', base_path + r'/file1.rst'] - -############################################################################### -# Create the operators on the servers -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# On each server we create two new operators, one for 'displacement' computations -# and a 'mesh_provider' operator and then define their data sources. 
The displacement -# and mesh_provider operators receive data from their respective data files on each server. -remote_displacement_operators = [] -remote_mesh_operators = [] -for i, server in enumerate(remote_servers): - displacement = ops.result.displacement(server=server) - mesh = ops.mesh.mesh_provider(server=server) - remote_displacement_operators.append(displacement) - remote_mesh_operators.append(mesh) - ds = dpf.DataSources(files[i], server=server) - ds.add_file_path(files_aux[i]) - displacement.inputs.data_sources(ds) - mesh.inputs.data_sources(ds) - -############################################################################### -# Create a local operators chain for expansion -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# In the following series of operators we merge the modal basis, the meshes, read -# the modal response and expand the modal response with the modal basis. - -merge_fields = ops.utility.merge_fields_containers() -merge_mesh = ops.utility.merge_meshes() - -ds = dpf.DataSources(base_path + r'/file_load_1.rfrq') -response = ops.result.displacement(data_sources=ds) -response.inputs.mesh(merge_mesh.outputs.merges_mesh) - -expansion = ops.math.modal_superposition( - solution_in_modal_space=response, - modal_basis=merge_fields -) -component = ops.logic.component_selector_fc(expansion, 1) - -############################################################################### -# Connect the operator chains together and get the output -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -for i, server in enumerate(remote_servers): - merge_fields.connect(i, remote_displacement_operators[i], 0) - merge_mesh.connect(i, remote_mesh_operators[i], 0) - -fc = component.get_output(0, dpf.types.fields_container) -merged_mesh = merge_mesh.get_output(0, dpf.types.meshed_region) - -merged_mesh.plot(fc.get_field_by_time_complex_ids(1, 0)) -merged_mesh.plot(fc.get_field_by_time_complex_ids(10, 0)) -print(fc) diff --git a/docs/source/examples/06-distributed-post/02-distributed-msup_expansion.py.md5 b/docs/source/examples/06-distributed-post/02-distributed-msup_expansion.py.md5 deleted file mode 100644 index 64b9c205aac..00000000000 --- a/docs/source/examples/06-distributed-post/02-distributed-msup_expansion.py.md5 +++ /dev/null @@ -1 +0,0 @@ -12fa856147254622aeff3ffed4456802 \ No newline at end of file diff --git a/docs/source/examples/06-distributed-post/02-distributed-msup_expansion.rst b/docs/source/examples/06-distributed-post/02-distributed-msup_expansion.rst deleted file mode 100644 index b1d90c0ce9d..00000000000 --- a/docs/source/examples/06-distributed-post/02-distributed-msup_expansion.rst +++ /dev/null @@ -1,302 +0,0 @@ - -.. DO NOT EDIT. -.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. -.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: -.. "examples\06-distributed-post\02-distributed-msup_expansion.py" -.. LINE NUMBERS ARE GIVEN BELOW. - -.. only:: html - - .. note:: - :class: sphx-glr-download-link-note - - Click :ref:`here ` - to download the full example code - -.. rst-class:: sphx-glr-example-title - -.. _sphx_glr_examples_06-distributed-post_02-distributed-msup_expansion.py: - - -.. _ref_distributed_msup: - -Distributed modal superposition -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -This example shows how distributed files can be read and expanded -on distributed processes. The modal basis (2 distributed files) is read -on 2 remote servers and the modal response reading and the expansion is -done on a third server. 
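The examples below start local servers for convenience, but, as their comments note, any DPF servers already running on the network could be used instead. A minimal sketch of that variant, assuming two servers are reachable at hypothetical addresses (the IPs and ports are placeholders, not values from this example):

.. code-block:: python

    from ansys.dpf import core as dpf

    # Connect to two already-running DPF servers instead of starting
    # local ones; both addresses below are hypothetical placeholders.
    remote_servers = [
        dpf.connect_to_server(ip="192.168.0.10", port=50052, as_global=False),
        dpf.connect_to_server(ip="192.168.0.11", port=50052, as_global=False),
    ]

The rest of the workflow is unchanged: the returned server handles are passed to the operators exactly like the locally started ones.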
- -To help understand this example the following diagram is provided. It shows -the operator chain used to compute the final result. - -.. image:: 02-operator-dep.svg - :align: center - :width: 800 - -.. GENERATED FROM PYTHON SOURCE LINES 20-21 - -Import dpf module and its examples files. - -.. GENERATED FROM PYTHON SOURCE LINES 21-26 - -.. code-block:: default - - - from ansys.dpf import core as dpf - from ansys.dpf.core import examples - from ansys.dpf.core import operators as ops - - - - - - - - -.. GENERATED FROM PYTHON SOURCE LINES 27-36 - -Configure the servers -~~~~~~~~~~~~~~~~~~~~~ -Make a list of ip addresses and port numbers on which dpf servers are -started. Operator instances will be created on each of those servers to -address each a different result file. -In this example, we will post process an analysis distributed in 2 files, -we will consequently require 2 remote processes. -To make this example easier, we will start local servers here, -but we could get connected to any existing servers on the network. - -.. GENERATED FROM PYTHON SOURCE LINES 36-41 - -.. code-block:: default - - - remote_servers = [dpf.start_local_server(as_global=False), dpf.start_local_server(as_global=False)] - ips = [remote_server.ip for remote_server in remote_servers] - ports = [remote_server.port for remote_server in remote_servers] - - - - - - - - -.. GENERATED FROM PYTHON SOURCE LINES 42-43 - -Print the ips and ports. - -.. GENERATED FROM PYTHON SOURCE LINES 43-46 - -.. code-block:: default - - print("ips:", ips) - print("ports:", ports) - - - - - -.. rst-class:: sphx-glr-script-out - - Out: - - .. code-block:: none - - ips: ['127.0.0.1', '127.0.0.1'] - ports: [50057, 50058] - - - - -.. GENERATED FROM PYTHON SOURCE LINES 47-48 - -Choose the file path. - -.. GENERATED FROM PYTHON SOURCE LINES 48-53 - -.. code-block:: default - - - base_path = examples.distributed_msup_folder - files = [base_path + r'/file0.mode', base_path + r'/file1.mode'] - files_aux = [base_path + r'/file0.rst', base_path + r'/file1.rst'] - - - - - - - - -.. GENERATED FROM PYTHON SOURCE LINES 54-59 - -Create the operators on the servers -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -On each server we create two new operators, one for 'displacement' computations -and a 'mesh_provider' operator and then define their data sources. The displacement -and mesh_provider operators receive data from their respective data files on each server. - -.. GENERATED FROM PYTHON SOURCE LINES 59-71 - -.. code-block:: default - - remote_displacement_operators = [] - remote_mesh_operators = [] - for i, server in enumerate(remote_servers): - displacement = ops.result.displacement(server=server) - mesh = ops.mesh.mesh_provider(server=server) - remote_displacement_operators.append(displacement) - remote_mesh_operators.append(mesh) - ds = dpf.DataSources(files[i], server=server) - ds.add_file_path(files_aux[i]) - displacement.inputs.data_sources(ds) - mesh.inputs.data_sources(ds) - - - - - - - - -.. GENERATED FROM PYTHON SOURCE LINES 72-76 - -Create a local operators chain for expansion -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -In the following series of operators we merge the modal basis, the meshes, read -the modal response and expand the modal response with the modal basis. - -.. GENERATED FROM PYTHON SOURCE LINES 76-90 - -.. 
code-block:: default - - - merge_fields = ops.utility.merge_fields_containers() - merge_mesh = ops.utility.merge_meshes() - - ds = dpf.DataSources(base_path + r'/file_load_1.rfrq') - response = ops.result.displacement(data_sources=ds) - response.inputs.mesh(merge_mesh.outputs.merges_mesh) - - expansion = ops.math.modal_superposition( - solution_in_modal_space=response, - modal_basis=merge_fields - ) - component = ops.logic.component_selector_fc(expansion, 1) - - - - - - - - -.. GENERATED FROM PYTHON SOURCE LINES 91-93 - -Connect the operator chains together and get the output -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -.. GENERATED FROM PYTHON SOURCE LINES 93-103 - -.. code-block:: default - - for i, server in enumerate(remote_servers): - merge_fields.connect(i, remote_displacement_operators[i], 0) - merge_mesh.connect(i, remote_mesh_operators[i], 0) - - fc = component.get_output(0, dpf.types.fields_container) - merged_mesh = merge_mesh.get_output(0, dpf.types.meshed_region) - - merged_mesh.plot(fc.get_field_by_time_complex_ids(1, 0)) - merged_mesh.plot(fc.get_field_by_time_complex_ids(10, 0)) - print(fc) - - - -.. rst-class:: sphx-glr-horizontal - - - * - - .. image-sg:: /examples/06-distributed-post/images/sphx_glr_02-distributed-msup_expansion_001.png - :alt: 02 distributed msup expansion - :srcset: /examples/06-distributed-post/images/sphx_glr_02-distributed-msup_expansion_001.png - :class: sphx-glr-multi-img - - * - - .. image-sg:: /examples/06-distributed-post/images/sphx_glr_02-distributed-msup_expansion_002.png - :alt: 02 distributed msup expansion - :srcset: /examples/06-distributed-post/images/sphx_glr_02-distributed-msup_expansion_002.png - :class: sphx-glr-multi-img - - -.. rst-class:: sphx-glr-script-out - - Out: - - .. code-block:: none - - DPF Fields Container - with 20 field(s) - defined on labels: complex time - - with: - - field 0 {complex: 0, time: 1} with Nodal location, 1 components and 1065 entities. - - field 1 {complex: 1, time: 1} with Nodal location, 1 components and 1065 entities. - - field 2 {complex: 0, time: 2} with Nodal location, 1 components and 1065 entities. - - field 3 {complex: 1, time: 2} with Nodal location, 1 components and 1065 entities. - - field 4 {complex: 0, time: 3} with Nodal location, 1 components and 1065 entities. - - field 5 {complex: 1, time: 3} with Nodal location, 1 components and 1065 entities. - - field 6 {complex: 0, time: 4} with Nodal location, 1 components and 1065 entities. - - field 7 {complex: 1, time: 4} with Nodal location, 1 components and 1065 entities. - - field 8 {complex: 0, time: 5} with Nodal location, 1 components and 1065 entities. - - field 9 {complex: 1, time: 5} with Nodal location, 1 components and 1065 entities. - - field 10 {complex: 0, time: 6} with Nodal location, 1 components and 1065 entities. - - field 11 {complex: 1, time: 6} with Nodal location, 1 components and 1065 entities. - - field 12 {complex: 0, time: 7} with Nodal location, 1 components and 1065 entities. - - field 13 {complex: 1, time: 7} with Nodal location, 1 components and 1065 entities. - - field 14 {complex: 0, time: 8} with Nodal location, 1 components and 1065 entities. - - field 15 {complex: 1, time: 8} with Nodal location, 1 components and 1065 entities. - - field 16 {complex: 0, time: 9} with Nodal location, 1 components and 1065 entities. - - field 17 {complex: 1, time: 9} with Nodal location, 1 components and 1065 entities. - - field 18 {complex: 0, time: 10} with Nodal location, 1 components and 1065 entities. 
- - field 19 {complex: 1, time: 10} with Nodal location, 1 components and 1065 entities. - - - - - - -.. rst-class:: sphx-glr-timing - - **Total running time of the script:** ( 0 minutes 5.095 seconds) - - -.. _sphx_glr_download_examples_06-distributed-post_02-distributed-msup_expansion.py: - - -.. only :: html - - .. container:: sphx-glr-footer - :class: sphx-glr-footer-example - - - - .. container:: sphx-glr-download sphx-glr-download-python - - :download:`Download Python source code: 02-distributed-msup_expansion.py <02-distributed-msup_expansion.py>` - - - - .. container:: sphx-glr-download sphx-glr-download-jupyter - - :download:`Download Jupyter notebook: 02-distributed-msup_expansion.ipynb <02-distributed-msup_expansion.ipynb>` - - -.. only:: html - - .. rst-class:: sphx-glr-signature - - `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/source/examples/06-distributed-post/02-distributed-msup_expansion_codeobj.pickle b/docs/source/examples/06-distributed-post/02-distributed-msup_expansion_codeobj.pickle deleted file mode 100644 index 511b6402715..00000000000 Binary files a/docs/source/examples/06-distributed-post/02-distributed-msup_expansion_codeobj.pickle and /dev/null differ diff --git a/docs/source/examples/06-distributed-post/02-operator-dep.dot b/docs/source/examples/06-distributed-post/02-operator-dep.dot deleted file mode 100644 index 9f0ae38443b..00000000000 --- a/docs/source/examples/06-distributed-post/02-operator-dep.dot +++ /dev/null @@ -1,51 +0,0 @@ -digraph foo { - graph [pad="0", nodesep="0.3", ranksep="0.3"] - node [shape=box, style=filled, fillcolor="#ffcc00", margin="0"]; - rankdir=LR; - splines=line; - - disp01 [label="displacement"]; - disp02 [label="displacement"]; - mesh01 [label="mesh"]; - mesh02 [label="mesh"]; - - subgraph cluster_1 { - ds01 [label="data_src", shape=box, style=filled, fillcolor=cadetblue2]; - - disp01; mesh01; - - ds01 -> disp01 [style=dashed]; - ds01 -> mesh01 [style=dashed]; - - label="Server 1"; - style=filled; - fillcolor=lightgrey; - } - - subgraph cluster_2 { - ds02 [label="data_src", shape=box, style=filled, fillcolor=cadetblue2]; - - - disp02; mesh02; - - ds02 -> disp02 [style=dashed]; - ds02 -> mesh02 [style=dashed]; - - label="Server 2"; - style=filled; - fillcolor=lightgrey; - } - - disp01 -> "merge_fields"; - mesh01 -> "merged_mesh"; - disp02 -> "merge_fields"; - mesh02 -> "merged_mesh"; - - ds03 [label="data_src", shape=box, style=filled, fillcolor=cadetblue2]; - ds03 -> "response" [style=dashed]; - - "merged_mesh" -> "response"; - "response" -> "expansion"; - "merge_fields" -> "expansion"; - "expansion" -> "component"; -} diff --git a/docs/source/examples/06-distributed-post/02-operator-dep.svg b/docs/source/examples/06-distributed-post/02-operator-dep.svg deleted file mode 100644 index fd04964afca..00000000000 --- a/docs/source/examples/06-distributed-post/02-operator-dep.svg +++ /dev/null @@ -1,173 +0,0 @@ - - - - - - -foo - - -cluster_1 - -Server 1 - - -cluster_2 - -Server 2 - - - -disp01 - -displacement - - - -merge_fields - -merge_fields - - - -disp01->merge_fields - - - - - -disp02 - -displacement - - - -disp02->merge_fields - - - - - -mesh01 - -mesh - - - -merged_mesh - -merged_mesh - - - -mesh01->merged_mesh - - - - - -mesh02 - -mesh - - - -mesh02->merged_mesh - - - - - -ds01 - -data_src - - - -ds01->disp01 - - - - - -ds01->mesh01 - - - - - -ds02 - -data_src - - - -ds02->disp02 - - - - - -ds02->mesh02 - - - - - -expansion - -expansion - - - -merge_fields->expansion - - - - - -response - -response - - - 
-merged_mesh->response - - - - - -ds03 - -data_src - - - -ds03->response - - - - - -response->expansion - - - - - -component - -component - - - -expansion->component - - - - - diff --git a/docs/source/examples/06-distributed-post/03-distributed-msup_expansion_steps.ipynb b/docs/source/examples/06-distributed-post/03-distributed-msup_expansion_steps.ipynb deleted file mode 100644 index 300e4d539bb..00000000000 --- a/docs/source/examples/06-distributed-post/03-distributed-msup_expansion_steps.ipynb +++ /dev/null @@ -1,169 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "%matplotlib inline" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n\n# Distributed msup distributed modal response\nThis example shows how distributed files can be read and expanded\non distributed processes. The modal basis (2 distributed files) is read\non 2 remote servers and the modal response (2 distributed files) reading and the expansion is\ndone on a third server.\n\nTo help understand this example the following diagram is provided. It shows\nthe operator chain used to compute the final result.\n\n\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Import dpf module and its examples files.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "import os.path\n\nfrom ansys.dpf import core as dpf\nfrom ansys.dpf.core import examples\nfrom ansys.dpf.core import operators as ops" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Configure the servers\nMake a list of ip addresses and port numbers on which dpf servers are\nstarted. 
Operator instances will be created on each of those servers to\naddress each a different result file.\nIn this example, we will post process an analysis distributed in 2 files,\nwe will consequently require 2 remote processes\nTo make this example easier, we will start local servers here,\nbut we could get connected to any existing servers on the network.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "remote_servers = [dpf.start_local_server(as_global=False), dpf.start_local_server(as_global=False)]\nips = [remote_server.ip for remote_server in remote_servers]\nports = [remote_server.port for remote_server in remote_servers]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Print the ips and ports.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "print(\"ips:\", ips)\nprint(\"ports:\", ports)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Choose the file path.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "base_path = examples.distributed_msup_folder\nfiles = [os.path.join(base_path, \"file0.mode\"), os.path.join(base_path, \"file1.mode\")]\nfiles_aux = [os.path.join(base_path, \"file0.rst\"), os.path.join(base_path, \"file1.rst\")]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Create the operators on the servers\nOn each server we create two new operators, one for 'displacement' computations\nand a 'mesh_provider' operator, and then define their data sources. The displacement\nand mesh_provider operators receive data from their respective data files on each server.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "remote_displacement_operators = []\nremote_mesh_operators = []\nfor i, server in enumerate(remote_servers):\n displacement = ops.result.displacement(server=server)\n mesh = ops.mesh.mesh_provider(server=server)\n remote_displacement_operators.append(displacement)\n remote_mesh_operators.append(mesh)\n ds = dpf.DataSources(files[i], server=server)\n ds.add_file_path(files_aux[i])\n displacement.inputs.data_sources(ds)\n mesh.inputs.data_sources(ds)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Create a local operators chain for expansion\nIn the following series of operators we merge the modal basis, the meshes, read\nthe modal response and expand the modal response with the modal basis.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "merge_fields = ops.utility.merge_fields_containers()\nmerge_mesh = ops.utility.merge_meshes()\n\nds = dpf.DataSources(os.path.join(base_path, \"file_load_1.rfrq\"))\nresponse = ops.result.displacement(data_sources=ds)\nresponse.inputs.mesh(merge_mesh.outputs.merges_mesh)\n\nds = dpf.DataSources(os.path.join(base_path, \"file_load_2.rfrq\"))\nfrom os import walk\n\nfor (dirpath, dirnames, filenames) in walk(base_path):\n print(filenames)\nresponse2 = ops.result.displacement(data_sources=ds)\nresponse2fc = response2.outputs.fields_container()\nresponse2fc.time_freq_support.time_frequencies.scoping.set_id(0, 2)\n\nmerge_use_pass = 
ops.utility.merge_fields_containers()\nmerge_use_pass.inputs.fields_containers1(response)\nmerge_use_pass.inputs.fields_containers2(response2fc)\n\nexpansion = ops.math.modal_superposition(\n solution_in_modal_space=merge_use_pass,\n modal_basis=merge_fields\n )\ncomponent = ops.logic.component_selector_fc(expansion, 1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Connect the operator chains together and get the output\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "for i, server in enumerate(remote_servers):\n merge_fields.connect(i, remote_displacement_operators[i], 0)\n merge_mesh.connect(i, remote_mesh_operators[i], 0)\n\nfc = component.get_output(0, dpf.types.fields_container)\nmerged_mesh = merge_mesh.get_output(0, dpf.types.meshed_region)\n\nmerged_mesh.plot(fc.get_field_by_time_complex_ids(1, 0))\nmerged_mesh.plot(fc.get_field_by_time_complex_ids(20, 0))\nprint(fc)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.13" - } - }, - "nbformat": 4, - "nbformat_minor": 0 -} \ No newline at end of file diff --git a/docs/source/examples/06-distributed-post/03-distributed-msup_expansion_steps.py b/docs/source/examples/06-distributed-post/03-distributed-msup_expansion_steps.py deleted file mode 100644 index 860e08c9e4b..00000000000 --- a/docs/source/examples/06-distributed-post/03-distributed-msup_expansion_steps.py +++ /dev/null @@ -1,115 +0,0 @@ -""" -.. _ref_distributed_msup_steps: - -Distributed msup distributed modal response -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -This example shows how distributed files can be read and expanded -on distributed processes. The modal basis (2 distributed files) is read -on 2 remote servers and the modal response (2 distributed files) reading and the expansion is -done on a third server. - -To help understand this example the following diagram is provided. It shows -the operator chain used to compute the final result. - -.. image:: 03-operator-dep.svg - :align: center - :width: 800 -""" - -############################################################################### -# Import dpf module and its examples files. -import os.path - -from ansys.dpf import core as dpf -from ansys.dpf.core import examples -from ansys.dpf.core import operators as ops - -############################################################################### -# Configure the servers -# ~~~~~~~~~~~~~~~~~~~~~ -# Make a list of ip addresses and port numbers on which dpf servers are -# started. Operator instances will be created on each of those servers to -# address each a different result file. -# In this example, we will post process an analysis distributed in 2 files, -# we will consequently require 2 remote processes -# To make this example easier, we will start local servers here, -# but we could get connected to any existing servers on the network. 
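# A note on the ``as_global=False`` flag used just below: starting a
# server without it would also install that server as the module-wide
# default (``dpf.SERVER``). A hedged, standalone sketch of the
# distinction (import repeated so the sketch runs on its own):
from ansys.dpf import core as dpf

handle_only = dpf.start_local_server(as_global=False)  # returns a handle only
# dpf.start_local_server()  # default as_global=True would also set dpf.SERVER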
-remote_servers = [dpf.start_local_server(as_global=False), dpf.start_local_server(as_global=False)] -ips = [remote_server.ip for remote_server in remote_servers] -ports = [remote_server.port for remote_server in remote_servers] - -############################################################################### -# Print the ips and ports. -print("ips:", ips) -print("ports:", ports) - -############################################################################### -# Choose the file path. - -base_path = examples.distributed_msup_folder -files = [os.path.join(base_path, "file0.mode"), os.path.join(base_path, "file1.mode")] -files_aux = [os.path.join(base_path, "file0.rst"), os.path.join(base_path, "file1.rst")] - -############################################################################### -# Create the operators on the servers -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# On each server we create two new operators, one for 'displacement' computations -# and a 'mesh_provider' operator, and then define their data sources. The displacement -# and mesh_provider operators receive data from their respective data files on each server. -remote_displacement_operators = [] -remote_mesh_operators = [] -for i, server in enumerate(remote_servers): - displacement = ops.result.displacement(server=server) - mesh = ops.mesh.mesh_provider(server=server) - remote_displacement_operators.append(displacement) - remote_mesh_operators.append(mesh) - ds = dpf.DataSources(files[i], server=server) - ds.add_file_path(files_aux[i]) - displacement.inputs.data_sources(ds) - mesh.inputs.data_sources(ds) - -############################################################################### -# Create a local operators chain for expansion -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# In the following series of operators we merge the modal basis, the meshes, read -# the modal response and expand the modal response with the modal basis. 
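# Later in this section, the first frequency of the second modal response
# is re-labeled with ``set_id`` so that, once the two .rfrq containers are
# merged, the load steps keep distinct set IDs. A standalone sketch of that
# ``Scoping.set_id`` behaviour (hypothetical IDs; a local DPF server is
# started implicitly):
from ansys.dpf import core as dpf

scoping = dpf.Scoping(ids=[1, 2, 3])
scoping.set_id(0, 2)   # the entity at index 0 now carries ID 2
print(scoping.ids)     # -> [2, 2, 3]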
- -merge_fields = ops.utility.merge_fields_containers() -merge_mesh = ops.utility.merge_meshes() - -ds = dpf.DataSources(os.path.join(base_path, "file_load_1.rfrq")) -response = ops.result.displacement(data_sources=ds) -response.inputs.mesh(merge_mesh.outputs.merges_mesh) - -ds = dpf.DataSources(os.path.join(base_path, "file_load_2.rfrq")) -from os import walk - -for (dirpath, dirnames, filenames) in walk(base_path): - print(filenames) -response2 = ops.result.displacement(data_sources=ds) -response2fc = response2.outputs.fields_container() -response2fc.time_freq_support.time_frequencies.scoping.set_id(0, 2) - -merge_use_pass = ops.utility.merge_fields_containers() -merge_use_pass.inputs.fields_containers1(response) -merge_use_pass.inputs.fields_containers2(response2fc) - -expansion = ops.math.modal_superposition( - solution_in_modal_space=merge_use_pass, - modal_basis=merge_fields - ) -component = ops.logic.component_selector_fc(expansion, 1) - -############################################################################### -# Connect the operator chains together and get the output -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -for i, server in enumerate(remote_servers): - merge_fields.connect(i, remote_displacement_operators[i], 0) - merge_mesh.connect(i, remote_mesh_operators[i], 0) - -fc = component.get_output(0, dpf.types.fields_container) -merged_mesh = merge_mesh.get_output(0, dpf.types.meshed_region) - -merged_mesh.plot(fc.get_field_by_time_complex_ids(1, 0)) -merged_mesh.plot(fc.get_field_by_time_complex_ids(20, 0)) -print(fc) diff --git a/docs/source/examples/06-distributed-post/03-distributed-msup_expansion_steps.py.md5 b/docs/source/examples/06-distributed-post/03-distributed-msup_expansion_steps.py.md5 deleted file mode 100644 index 3b174cdcdd1..00000000000 --- a/docs/source/examples/06-distributed-post/03-distributed-msup_expansion_steps.py.md5 +++ /dev/null @@ -1 +0,0 @@ -3b3383f39064e84e39438992e3964441 \ No newline at end of file diff --git a/docs/source/examples/06-distributed-post/03-distributed-msup_expansion_steps.rst b/docs/source/examples/06-distributed-post/03-distributed-msup_expansion_steps.rst deleted file mode 100644 index a7fc7455a62..00000000000 --- a/docs/source/examples/06-distributed-post/03-distributed-msup_expansion_steps.rst +++ /dev/null @@ -1,343 +0,0 @@ - -.. DO NOT EDIT. -.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. -.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: -.. "examples\06-distributed-post\03-distributed-msup_expansion_steps.py" -.. LINE NUMBERS ARE GIVEN BELOW. - -.. only:: html - - .. note:: - :class: sphx-glr-download-link-note - - Click :ref:`here ` - to download the full example code - -.. rst-class:: sphx-glr-example-title - -.. _sphx_glr_examples_06-distributed-post_03-distributed-msup_expansion_steps.py: - - -.. _ref_distributed_msup_steps: - -Distributed msup distributed modal response -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -This example shows how distributed files can be read and expanded -on distributed processes. The modal basis (2 distributed files) is read -on 2 remote servers and the modal response (2 distributed files) reading and the expansion is -done on a third server. - -To help understand this example the following diagram is provided. It shows -the operator chain used to compute the final result. - -.. image:: 03-operator-dep.svg - :align: center - :width: 800 - -.. GENERATED FROM PYTHON SOURCE LINES 20-21 - -Import dpf module and its examples files. - -.. 
GENERATED FROM PYTHON SOURCE LINES 21-27 - -.. code-block:: default - - import os.path - - from ansys.dpf import core as dpf - from ansys.dpf.core import examples - from ansys.dpf.core import operators as ops - - - - - - - - -.. GENERATED FROM PYTHON SOURCE LINES 28-37 - -Configure the servers -~~~~~~~~~~~~~~~~~~~~~ -Make a list of ip addresses and port numbers on which dpf servers are -started. Operator instances will be created on each of those servers to -address each a different result file. -In this example, we will post process an analysis distributed in 2 files, -we will consequently require 2 remote processes -To make this example easier, we will start local servers here, -but we could get connected to any existing servers on the network. - -.. GENERATED FROM PYTHON SOURCE LINES 37-41 - -.. code-block:: default - - remote_servers = [dpf.start_local_server(as_global=False), dpf.start_local_server(as_global=False)] - ips = [remote_server.ip for remote_server in remote_servers] - ports = [remote_server.port for remote_server in remote_servers] - - - - - - - - -.. GENERATED FROM PYTHON SOURCE LINES 42-43 - -Print the ips and ports. - -.. GENERATED FROM PYTHON SOURCE LINES 43-46 - -.. code-block:: default - - print("ips:", ips) - print("ports:", ports) - - - - - -.. rst-class:: sphx-glr-script-out - - Out: - - .. code-block:: none - - ips: ['127.0.0.1', '127.0.0.1'] - ports: [50057, 50058] - - - - -.. GENERATED FROM PYTHON SOURCE LINES 47-48 - -Choose the file path. - -.. GENERATED FROM PYTHON SOURCE LINES 48-53 - -.. code-block:: default - - - base_path = examples.distributed_msup_folder - files = [os.path.join(base_path, "file0.mode"), os.path.join(base_path, "file1.mode")] - files_aux = [os.path.join(base_path, "file0.rst"), os.path.join(base_path, "file1.rst")] - - - - - - - - -.. GENERATED FROM PYTHON SOURCE LINES 54-59 - -Create the operators on the servers -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -On each server we create two new operators, one for 'displacement' computations -and a 'mesh_provider' operator, and then define their data sources. The displacement -and mesh_provider operators receive data from their respective data files on each server. - -.. GENERATED FROM PYTHON SOURCE LINES 59-71 - -.. code-block:: default - - remote_displacement_operators = [] - remote_mesh_operators = [] - for i, server in enumerate(remote_servers): - displacement = ops.result.displacement(server=server) - mesh = ops.mesh.mesh_provider(server=server) - remote_displacement_operators.append(displacement) - remote_mesh_operators.append(mesh) - ds = dpf.DataSources(files[i], server=server) - ds.add_file_path(files_aux[i]) - displacement.inputs.data_sources(ds) - mesh.inputs.data_sources(ds) - - - - - - - - -.. GENERATED FROM PYTHON SOURCE LINES 72-76 - -Create a local operators chain for expansion -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -In the following series of operators we merge the modal basis, the meshes, read -the modal response and expand the modal response with the modal basis. - -.. GENERATED FROM PYTHON SOURCE LINES 76-103 - -.. 
code-block:: default - - - merge_fields = ops.utility.merge_fields_containers() - merge_mesh = ops.utility.merge_meshes() - - ds = dpf.DataSources(os.path.join(base_path, "file_load_1.rfrq")) - response = ops.result.displacement(data_sources=ds) - response.inputs.mesh(merge_mesh.outputs.merges_mesh) - - ds = dpf.DataSources(os.path.join(base_path, "file_load_2.rfrq")) - from os import walk - - for (dirpath, dirnames, filenames) in walk(base_path): - print(filenames) - response2 = ops.result.displacement(data_sources=ds) - response2fc = response2.outputs.fields_container() - response2fc.time_freq_support.time_frequencies.scoping.set_id(0, 2) - - merge_use_pass = ops.utility.merge_fields_containers() - merge_use_pass.inputs.fields_containers1(response) - merge_use_pass.inputs.fields_containers2(response2fc) - - expansion = ops.math.modal_superposition( - solution_in_modal_space=merge_use_pass, - modal_basis=merge_fields - ) - component = ops.logic.component_selector_fc(expansion, 1) - - - - - -.. rst-class:: sphx-glr-script-out - - Out: - - .. code-block:: none - - ['file0.mode', 'file0.rst', 'file1.mode', 'file1.rst', 'file_load_1.rfrq', 'file_load_2.rfrq'] - - - - -.. GENERATED FROM PYTHON SOURCE LINES 104-106 - -Connect the operator chains together and get the output -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -.. GENERATED FROM PYTHON SOURCE LINES 106-116 - -.. code-block:: default - - for i, server in enumerate(remote_servers): - merge_fields.connect(i, remote_displacement_operators[i], 0) - merge_mesh.connect(i, remote_mesh_operators[i], 0) - - fc = component.get_output(0, dpf.types.fields_container) - merged_mesh = merge_mesh.get_output(0, dpf.types.meshed_region) - - merged_mesh.plot(fc.get_field_by_time_complex_ids(1, 0)) - merged_mesh.plot(fc.get_field_by_time_complex_ids(20, 0)) - print(fc) - - - -.. rst-class:: sphx-glr-horizontal - - - * - - .. image-sg:: /examples/06-distributed-post/images/sphx_glr_03-distributed-msup_expansion_steps_001.png - :alt: 03 distributed msup expansion steps - :srcset: /examples/06-distributed-post/images/sphx_glr_03-distributed-msup_expansion_steps_001.png - :class: sphx-glr-multi-img - - * - - .. image-sg:: /examples/06-distributed-post/images/sphx_glr_03-distributed-msup_expansion_steps_002.png - :alt: 03 distributed msup expansion steps - :srcset: /examples/06-distributed-post/images/sphx_glr_03-distributed-msup_expansion_steps_002.png - :class: sphx-glr-multi-img - - -.. rst-class:: sphx-glr-script-out - - Out: - - .. code-block:: none - - DPF Fields Container - with 40 field(s) - defined on labels: complex time - - with: - - field 0 {complex: 0, time: 1} with Nodal location, 1 components and 1065 entities. - - field 1 {complex: 1, time: 1} with Nodal location, 1 components and 1065 entities. - - field 2 {complex: 0, time: 2} with Nodal location, 1 components and 1065 entities. - - field 3 {complex: 1, time: 2} with Nodal location, 1 components and 1065 entities. - - field 4 {complex: 0, time: 3} with Nodal location, 1 components and 1065 entities. - - field 5 {complex: 1, time: 3} with Nodal location, 1 components and 1065 entities. - - field 6 {complex: 0, time: 4} with Nodal location, 1 components and 1065 entities. - - field 7 {complex: 1, time: 4} with Nodal location, 1 components and 1065 entities. - - field 8 {complex: 0, time: 5} with Nodal location, 1 components and 1065 entities. - - field 9 {complex: 1, time: 5} with Nodal location, 1 components and 1065 entities. 
- - field 10 {complex: 0, time: 6} with Nodal location, 1 components and 1065 entities. - - field 11 {complex: 1, time: 6} with Nodal location, 1 components and 1065 entities. - - field 12 {complex: 0, time: 7} with Nodal location, 1 components and 1065 entities. - - field 13 {complex: 1, time: 7} with Nodal location, 1 components and 1065 entities. - - field 14 {complex: 0, time: 8} with Nodal location, 1 components and 1065 entities. - - field 15 {complex: 1, time: 8} with Nodal location, 1 components and 1065 entities. - - field 16 {complex: 0, time: 9} with Nodal location, 1 components and 1065 entities. - - field 17 {complex: 1, time: 9} with Nodal location, 1 components and 1065 entities. - - field 18 {complex: 0, time: 10} with Nodal location, 1 components and 1065 entities. - - field 19 {complex: 1, time: 10} with Nodal location, 1 components and 1065 entities. - - field 20 {complex: 0, time: 11} with Nodal location, 1 components and 1065 entities. - - field 21 {complex: 1, time: 11} with Nodal location, 1 components and 1065 entities. - - field 22 {complex: 0, time: 12} with Nodal location, 1 components and 1065 entities. - - field 23 {complex: 1, time: 12} with Nodal location, 1 components and 1065 entities. - - field 24 {complex: 0, time: 13} with Nodal location, 1 components and 1065 entities. - - field 25 {complex: 1, time: 13} with Nodal location, 1 components and 1065 entities. - - field 26 {complex: 0, time: 14} with Nodal location, 1 components and 1065 entities. - - field 27 {complex: 1, time: 14} with Nodal location, 1 components and 1065 entities. - - field 28 {complex: 0, time: 15} with Nodal location, 1 components and 1065 entities. - - field 29 {complex: 1, time: 15} with Nodal location, 1 components and 1065 entities. - - field 30 {complex: 0, time: 16} with Nodal location, 1 components and 1065 entities. - - field 31 {complex: 1, time: 16} with Nodal location, 1 components and 1065 entities. - - field 32 {complex: 0, time: 17} with Nodal location, 1 components and 1065 entities. - - field 33 {complex: 1, time: 17} with Nodal location, 1 components and 1065 entities. - - field 34 {complex: 0, time: 18} with Nodal location, 1 components and 1065 entities. - - field 35 {complex: 1, time: 18} with Nodal location, 1 components and 1065 entities. - - field 36 {complex: 0, time: 19} with Nodal location, 1 components and 1065 entities. - - field 37 {complex: 1, time: 19} with Nodal location, 1 components and 1065 entities. - - field 38 {complex: 0, time: 20} with Nodal location, 1 components and 1065 entities. - - field 39 {complex: 1, time: 20} with Nodal location, 1 components and 1065 entities. - - - - - - -.. rst-class:: sphx-glr-timing - - **Total running time of the script:** ( 0 minutes 2.859 seconds) - - -.. _sphx_glr_download_examples_06-distributed-post_03-distributed-msup_expansion_steps.py: - - -.. only :: html - - .. container:: sphx-glr-footer - :class: sphx-glr-footer-example - - - - .. container:: sphx-glr-download sphx-glr-download-python - - :download:`Download Python source code: 03-distributed-msup_expansion_steps.py <03-distributed-msup_expansion_steps.py>` - - - - .. container:: sphx-glr-download sphx-glr-download-jupyter - - :download:`Download Jupyter notebook: 03-distributed-msup_expansion_steps.ipynb <03-distributed-msup_expansion_steps.ipynb>` - - -.. only:: html - - .. 
rst-class:: sphx-glr-signature - - `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/source/examples/06-distributed-post/03-distributed-msup_expansion_steps_codeobj.pickle b/docs/source/examples/06-distributed-post/03-distributed-msup_expansion_steps_codeobj.pickle deleted file mode 100644 index 60274fa6aad..00000000000 Binary files a/docs/source/examples/06-distributed-post/03-distributed-msup_expansion_steps_codeobj.pickle and /dev/null differ diff --git a/docs/source/examples/06-distributed-post/03-operator-dep.dot b/docs/source/examples/06-distributed-post/03-operator-dep.dot deleted file mode 100644 index ce9f093990f..00000000000 --- a/docs/source/examples/06-distributed-post/03-operator-dep.dot +++ /dev/null @@ -1,54 +0,0 @@ -digraph foo { - graph [pad="0", nodesep="0.3", ranksep="0.3"] - node [shape=box, style=filled, fillcolor="#ffcc00", margin="0"]; - rankdir=LR; - splines=line; - - disp01 [label="displacement"]; - disp02 [label="displacement"]; - mesh01 [label="mesh"]; - mesh02 [label="mesh"]; - - subgraph cluster_1 { - ds01 [label="data_src", shape=box, style=filled, fillcolor=cadetblue2]; - - disp01; mesh01; - - ds01 -> disp01 [style=dashed]; - ds01 -> mesh01 [style=dashed]; - - label="Server 1"; - style=filled; - fillcolor=lightgrey; - } - - subgraph cluster_2 { - ds02 [label="data_src", shape=box, style=filled, fillcolor=cadetblue2]; - - disp02; mesh02; - - ds02 -> disp02 [style=dashed]; - ds02 -> mesh02 [style=dashed]; - - label="Server 2"; - style=filled; - fillcolor=lightgrey; - } - - disp01 -> "merge_fields"; - mesh01 -> "merge_mesh"; - disp02 -> "merge_fields"; - mesh02 -> "merge_mesh"; - - ds03 [label="data_src", shape=box, style=filled, fillcolor=cadetblue2]; - ds03 -> "response2" [style=dashed]; - ds04 [label="data_src", shape=box, style=filled, fillcolor=cadetblue2]; - ds04 -> "response" [style=dashed]; - - "merge_mesh" -> "response"; - "response" -> "merge_use_pass"; - "response2" -> "merge_use_pass"; - "merge_use_pass" -> "expansion"; - "merge_fields" -> "expansion"; - "expansion" -> "component"; -} diff --git a/docs/source/examples/06-distributed-post/03-operator-dep.svg b/docs/source/examples/06-distributed-post/03-operator-dep.svg deleted file mode 100644 index 00f50ac766d..00000000000 --- a/docs/source/examples/06-distributed-post/03-operator-dep.svg +++ /dev/null @@ -1,209 +0,0 @@ - - - - - - -foo - - -cluster_1 - -Server 1 - - -cluster_2 - -Server 2 - - - -disp01 - -displacement - - - -merge_fields - -merge_fields - - - -disp01->merge_fields - - - - - -disp02 - -displacement - - - -disp02->merge_fields - - - - - -mesh01 - -mesh - - - -merge_mesh - -merge_mesh - - - -mesh01->merge_mesh - - - - - -mesh02 - -mesh - - - -mesh02->merge_mesh - - - - - -ds01 - -data_src - - - -ds01->disp01 - - - - - -ds01->mesh01 - - - - - -ds02 - -data_src - - - -ds02->disp02 - - - - - -ds02->mesh02 - - - - - -expansion - -expansion - - - -merge_fields->expansion - - - - - -response - -response - - - -merge_mesh->response - - - - - -ds03 - -data_src - - - -response2 - -response2 - - - -ds03->response2 - - - - - -merge_use_pass - -merge_use_pass - - - -response2->merge_use_pass - - - - - -ds04 - -data_src - - - -ds04->response - - - - - -response->merge_use_pass - - - - - -merge_use_pass->expansion - - - - - -component - -component - - - -expansion->component - - - - - diff --git a/docs/source/examples/06-distributed-post/images/graphviz-0b490f262404b99215a9bce9bcf28ed202fd712f.png 
b/docs/source/examples/06-distributed-post/images/graphviz-0b490f262404b99215a9bce9bcf28ed202fd712f.png deleted file mode 100644 index 0b53d9a6459..00000000000 Binary files a/docs/source/examples/06-distributed-post/images/graphviz-0b490f262404b99215a9bce9bcf28ed202fd712f.png and /dev/null differ diff --git a/docs/source/examples/06-distributed-post/images/graphviz-0b490f262404b99215a9bce9bcf28ed202fd712f.png.map b/docs/source/examples/06-distributed-post/images/graphviz-0b490f262404b99215a9bce9bcf28ed202fd712f.png.map deleted file mode 100644 index fe5db591e8c..00000000000 --- a/docs/source/examples/06-distributed-post/images/graphviz-0b490f262404b99215a9bce9bcf28ed202fd712f.png.map +++ /dev/null @@ -1,2 +0,0 @@ - - diff --git a/docs/source/examples/06-distributed-post/images/graphviz-6c5c6731afa3cb99496c9582a659820e66e59ad8.png b/docs/source/examples/06-distributed-post/images/graphviz-6c5c6731afa3cb99496c9582a659820e66e59ad8.png deleted file mode 100644 index 318e494d038..00000000000 Binary files a/docs/source/examples/06-distributed-post/images/graphviz-6c5c6731afa3cb99496c9582a659820e66e59ad8.png and /dev/null differ diff --git a/docs/source/examples/06-distributed-post/images/graphviz-6c5c6731afa3cb99496c9582a659820e66e59ad8.png.map b/docs/source/examples/06-distributed-post/images/graphviz-6c5c6731afa3cb99496c9582a659820e66e59ad8.png.map deleted file mode 100644 index fe5db591e8c..00000000000 --- a/docs/source/examples/06-distributed-post/images/graphviz-6c5c6731afa3cb99496c9582a659820e66e59ad8.png.map +++ /dev/null @@ -1,2 +0,0 @@ - - diff --git a/docs/source/examples/06-distributed-post/images/graphviz-6d76c45ee399e15a2be231294ffa4ba764e4c442.png b/docs/source/examples/06-distributed-post/images/graphviz-6d76c45ee399e15a2be231294ffa4ba764e4c442.png deleted file mode 100644 index 8a03883c23d..00000000000 Binary files a/docs/source/examples/06-distributed-post/images/graphviz-6d76c45ee399e15a2be231294ffa4ba764e4c442.png and /dev/null differ diff --git a/docs/source/examples/06-distributed-post/images/graphviz-6d76c45ee399e15a2be231294ffa4ba764e4c442.png.map b/docs/source/examples/06-distributed-post/images/graphviz-6d76c45ee399e15a2be231294ffa4ba764e4c442.png.map deleted file mode 100644 index fe5db591e8c..00000000000 --- a/docs/source/examples/06-distributed-post/images/graphviz-6d76c45ee399e15a2be231294ffa4ba764e4c442.png.map +++ /dev/null @@ -1,2 +0,0 @@ - - diff --git a/docs/source/examples/06-distributed-post/images/graphviz-8a6d6449ecd7de654cd0b1382ac1a9d333037718.png b/docs/source/examples/06-distributed-post/images/graphviz-8a6d6449ecd7de654cd0b1382ac1a9d333037718.png deleted file mode 100644 index 229467e5d2d..00000000000 Binary files a/docs/source/examples/06-distributed-post/images/graphviz-8a6d6449ecd7de654cd0b1382ac1a9d333037718.png and /dev/null differ diff --git a/docs/source/examples/06-distributed-post/images/graphviz-8a6d6449ecd7de654cd0b1382ac1a9d333037718.png.map b/docs/source/examples/06-distributed-post/images/graphviz-8a6d6449ecd7de654cd0b1382ac1a9d333037718.png.map deleted file mode 100644 index fe5db591e8c..00000000000 --- a/docs/source/examples/06-distributed-post/images/graphviz-8a6d6449ecd7de654cd0b1382ac1a9d333037718.png.map +++ /dev/null @@ -1,2 +0,0 @@ - - diff --git a/docs/source/examples/06-distributed-post/images/graphviz-d3a2fbe72465ba55879181d1c4ceccf7a25d2c7a.png b/docs/source/examples/06-distributed-post/images/graphviz-d3a2fbe72465ba55879181d1c4ceccf7a25d2c7a.png deleted file mode 100644 index 0b53d9a6459..00000000000 Binary files 
a/docs/source/examples/06-distributed-post/images/graphviz-d3a2fbe72465ba55879181d1c4ceccf7a25d2c7a.png and /dev/null differ diff --git a/docs/source/examples/06-distributed-post/images/graphviz-d3a2fbe72465ba55879181d1c4ceccf7a25d2c7a.png.map b/docs/source/examples/06-distributed-post/images/graphviz-d3a2fbe72465ba55879181d1c4ceccf7a25d2c7a.png.map deleted file mode 100644 index fe5db591e8c..00000000000 --- a/docs/source/examples/06-distributed-post/images/graphviz-d3a2fbe72465ba55879181d1c4ceccf7a25d2c7a.png.map +++ /dev/null @@ -1,2 +0,0 @@ - - diff --git a/docs/source/examples/06-distributed-post/images/sphx_glr_01-distributed_workflows_on_remote_001.png b/docs/source/examples/06-distributed-post/images/sphx_glr_01-distributed_workflows_on_remote_001.png deleted file mode 100644 index 6e156dc71ec..00000000000 Binary files a/docs/source/examples/06-distributed-post/images/sphx_glr_01-distributed_workflows_on_remote_001.png and /dev/null differ diff --git a/docs/source/examples/06-distributed-post/images/sphx_glr_02-distributed-msup_expansion_001.png b/docs/source/examples/06-distributed-post/images/sphx_glr_02-distributed-msup_expansion_001.png deleted file mode 100644 index d32ad0bb532..00000000000 Binary files a/docs/source/examples/06-distributed-post/images/sphx_glr_02-distributed-msup_expansion_001.png and /dev/null differ diff --git a/docs/source/examples/06-distributed-post/images/sphx_glr_02-distributed-msup_expansion_002.png b/docs/source/examples/06-distributed-post/images/sphx_glr_02-distributed-msup_expansion_002.png deleted file mode 100644 index 937eaf8f4db..00000000000 Binary files a/docs/source/examples/06-distributed-post/images/sphx_glr_02-distributed-msup_expansion_002.png and /dev/null differ diff --git a/docs/source/examples/06-distributed-post/images/sphx_glr_02-distributed_workflows_on_remote_001.png b/docs/source/examples/06-distributed-post/images/sphx_glr_02-distributed_workflows_on_remote_001.png deleted file mode 100644 index c87a7eb7cbf..00000000000 Binary files a/docs/source/examples/06-distributed-post/images/sphx_glr_02-distributed_workflows_on_remote_001.png and /dev/null differ diff --git a/docs/source/examples/06-distributed-post/images/sphx_glr_03-distributed-msup_expansion_001.png b/docs/source/examples/06-distributed-post/images/sphx_glr_03-distributed-msup_expansion_001.png deleted file mode 100644 index d854d8fa4f3..00000000000 Binary files a/docs/source/examples/06-distributed-post/images/sphx_glr_03-distributed-msup_expansion_001.png and /dev/null differ diff --git a/docs/source/examples/06-distributed-post/images/sphx_glr_03-distributed-msup_expansion_002.png b/docs/source/examples/06-distributed-post/images/sphx_glr_03-distributed-msup_expansion_002.png deleted file mode 100644 index 4b874f64b85..00000000000 Binary files a/docs/source/examples/06-distributed-post/images/sphx_glr_03-distributed-msup_expansion_002.png and /dev/null differ diff --git a/docs/source/examples/06-distributed-post/images/sphx_glr_03-distributed-msup_expansion_steps_001.png b/docs/source/examples/06-distributed-post/images/sphx_glr_03-distributed-msup_expansion_steps_001.png deleted file mode 100644 index d32ad0bb532..00000000000 Binary files a/docs/source/examples/06-distributed-post/images/sphx_glr_03-distributed-msup_expansion_steps_001.png and /dev/null differ diff --git a/docs/source/examples/06-distributed-post/images/sphx_glr_03-distributed-msup_expansion_steps_002.png 
b/docs/source/examples/06-distributed-post/images/sphx_glr_03-distributed-msup_expansion_steps_002.png deleted file mode 100644 index fcf8fd2d9af..00000000000 Binary files a/docs/source/examples/06-distributed-post/images/sphx_glr_03-distributed-msup_expansion_steps_002.png and /dev/null differ diff --git a/docs/source/examples/06-distributed-post/images/sphx_glr_04-distributed-msup_expansion_steps_001.png b/docs/source/examples/06-distributed-post/images/sphx_glr_04-distributed-msup_expansion_steps_001.png deleted file mode 100644 index d854d8fa4f3..00000000000 Binary files a/docs/source/examples/06-distributed-post/images/sphx_glr_04-distributed-msup_expansion_steps_001.png and /dev/null differ diff --git a/docs/source/examples/06-distributed-post/images/sphx_glr_04-distributed-msup_expansion_steps_002.png b/docs/source/examples/06-distributed-post/images/sphx_glr_04-distributed-msup_expansion_steps_002.png deleted file mode 100644 index ac8b80ce9fc..00000000000 Binary files a/docs/source/examples/06-distributed-post/images/sphx_glr_04-distributed-msup_expansion_steps_002.png and /dev/null differ diff --git a/docs/source/examples/06-distributed-post/images/thumb/sphx_glr_00-distributed_total_disp_thumb.png b/docs/source/examples/06-distributed-post/images/thumb/sphx_glr_00-distributed_total_disp_thumb.png deleted file mode 100644 index 8a5fed589d1..00000000000 Binary files a/docs/source/examples/06-distributed-post/images/thumb/sphx_glr_00-distributed_total_disp_thumb.png and /dev/null differ diff --git a/docs/source/examples/06-distributed-post/images/thumb/sphx_glr_01-distributed_workflows_on_remote_thumb.png b/docs/source/examples/06-distributed-post/images/thumb/sphx_glr_01-distributed_workflows_on_remote_thumb.png deleted file mode 100644 index 3748dd87e97..00000000000 Binary files a/docs/source/examples/06-distributed-post/images/thumb/sphx_glr_01-distributed_workflows_on_remote_thumb.png and /dev/null differ diff --git a/docs/source/examples/06-distributed-post/images/thumb/sphx_glr_02-distributed-msup_expansion_thumb.png b/docs/source/examples/06-distributed-post/images/thumb/sphx_glr_02-distributed-msup_expansion_thumb.png deleted file mode 100644 index cb42d0be0ec..00000000000 Binary files a/docs/source/examples/06-distributed-post/images/thumb/sphx_glr_02-distributed-msup_expansion_thumb.png and /dev/null differ diff --git a/docs/source/examples/06-distributed-post/images/thumb/sphx_glr_03-distributed-msup_expansion_steps_thumb.png b/docs/source/examples/06-distributed-post/images/thumb/sphx_glr_03-distributed-msup_expansion_steps_thumb.png deleted file mode 100644 index cb42d0be0ec..00000000000 Binary files a/docs/source/examples/06-distributed-post/images/thumb/sphx_glr_03-distributed-msup_expansion_steps_thumb.png and /dev/null differ diff --git a/docs/source/examples/06-distributed-post/images/thumb/sphx_glr_04-distributed_total_disp_with_operator_thumb.png b/docs/source/examples/06-distributed-post/images/thumb/sphx_glr_04-distributed_total_disp_with_operator_thumb.png deleted file mode 100644 index b06c4e6a177..00000000000 Binary files a/docs/source/examples/06-distributed-post/images/thumb/sphx_glr_04-distributed_total_disp_with_operator_thumb.png and /dev/null differ diff --git a/docs/source/examples/06-distributed-post/images/thumb/sphx_glr_04-distributed_total_disp_with_operators_thumb.png b/docs/source/examples/06-distributed-post/images/thumb/sphx_glr_04-distributed_total_disp_with_operators_thumb.png deleted file mode 100644 index 8a5fed589d1..00000000000 Binary 
diff --git a/docs/source/examples/06-distributed-post/sg_execution_times.rst b/docs/source/examples/06-distributed-post/sg_execution_times.rst
deleted file mode 100644
index 28d83aa6724..00000000000
--- a/docs/source/examples/06-distributed-post/sg_execution_times.rst
+++ /dev/null
@@ -1,18 +0,0 @@
-
-:orphan:
-
-.. _sphx_glr_examples_06-distributed-post_sg_execution_times:
-
-Computation times
-=================
-**00:10.295** total execution time for **examples_06-distributed-post** files:
-
-+----------------------------------------------------------------------------------------------------------------------------------+-----------+--------+
-| :ref:`sphx_glr_examples_06-distributed-post_02-distributed-msup_expansion.py` (``02-distributed-msup_expansion.py``)              | 00:05.095 | 0.0 MB |
-+----------------------------------------------------------------------------------------------------------------------------------+-----------+--------+
-| :ref:`sphx_glr_examples_06-distributed-post_03-distributed-msup_expansion_steps.py` (``03-distributed-msup_expansion_steps.py``)  | 00:02.859 | 0.0 MB |
-+----------------------------------------------------------------------------------------------------------------------------------+-----------+--------+
-| :ref:`sphx_glr_examples_06-distributed-post_01-distributed_workflows_on_remote.py` (``01-distributed_workflows_on_remote.py``)    | 00:01.630 | 0.0 MB |
-+----------------------------------------------------------------------------------------------------------------------------------+-----------+--------+
-| :ref:`sphx_glr_examples_06-distributed-post_00-distributed_total_disp.py` (``00-distributed_total_disp.py``)                      | 00:00.711 | 0.0 MB |
-+----------------------------------------------------------------------------------------------------------------------------------+-----------+--------+
diff --git a/docs/source/examples/07-averaging/00-compute_and_average.dot b/docs/source/examples/07-averaging/00-compute_and_average.dot
deleted file mode 100644
index 1e8b61ac97f..00000000000
--- a/docs/source/examples/07-averaging/00-compute_and_average.dot
+++ /dev/null
@@ -1,39 +0,0 @@
-digraph foo {
-    graph [pad="0", nodesep="0.3", ranksep="0.3"]
-    node [shape=box, style=filled, fillcolor="#ffcc0", margin="0"];
-    rankdir=LR;
-    splines=line;
-    node [fixedsize=true,width=2.5]
-
-    stress01 [label="stress"];
-    stress02 [label="stress"];
-    vm01 [label="von_mises_eqv"];
-    vm02 [label="von_mises_eqv"];
-    avg01 [label="elemental_nodal_to_nodal", width=2.5];
-    avg02 [label="elemental_nodal_to_nodal", width=2.5];
-
-    subgraph cluster_1 {
-        ds01 [label="data_src", shape=box, style=filled, fillcolor=cadetblue2];
-
-        ds01 -> stress01 [style=dashed];
-        stress01 -> vm01;
-        vm01 -> avg01
-
-        label="Compute Von Mises then average stresses";
-        style=filled;
-        fillcolor=lightgrey;
-    }
-
-    subgraph cluster_2 {
-        ds02 [label="data_src", shape=box, style=filled, fillcolor=cadetblue2];
-
-        ds02 -> stress02 [style=dashed];
-        stress02 -> avg02;
-        avg02 -> vm02
-
-        label="Average stresses then compute Von Mises";
-        style=filled;
-        fillcolor=lightgrey;
-    }
-
-}
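Note: the deleted 00-compute_and_average.dot above contrasts two operator orders, Von Mises before averaging versus averaging before Von Mises. A minimal sketch of the two chains it depicts, assuming a static structural result file (the path below is a placeholder) and the operator names shown in the graph:

    from ansys.dpf import core as dpf
    from ansys.dpf.core import operators as ops

    # Placeholder path: any static structural result file works here.
    model = dpf.Model("path/to/result.rst")

    # Cluster 1: compute Von Mises on elemental-nodal stresses, then average.
    stress1 = model.results.stress()
    vm_first = ops.invariant.von_mises_eqv_fc(stress1)
    averaged_last = ops.averaging.elemental_nodal_to_nodal_fc(vm_first).eval()

    # Cluster 2: average the stress tensor to nodes, then compute Von Mises.
    stress2 = model.results.stress()
    averaged_first = ops.averaging.elemental_nodal_to_nodal_fc(stress2)
    vm_last = ops.invariant.von_mises_eqv_fc(averaged_first).eval()

    # The two orders are not equivalent; comparing extrema makes that visible.
    print(averaged_last[0].max().data, vm_last[0].max().data)

Averaging the tensor first smooths component values before the nonlinear Von Mises mapping is applied, which is why the two clusters generally disagree at nodes shared by several elements.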
diff --git a/docs/source/examples/07-averaging/00-compute_and_average.svg b/docs/source/examples/07-averaging/00-compute_and_average.svg
deleted file mode 100644
index d0f542d2bcd..00000000000
--- a/docs/source/examples/07-averaging/00-compute_and_average.svg
+++ /dev/null
@@ -1,107 +0,0 @@
-<107 lines of Graphviz-generated SVG markup for the compute_and_average graph above; the raw XML was not preserved in this extract>
diff --git a/docs/source/examples/07-python-operators/images/thumb/out02.glb b/docs/source/examples/07-python-operators/images/thumb/out02.glb
deleted file mode 100644
index b9ab1d7d495..00000000000
Binary files a/docs/source/examples/07-python-operators/images/thumb/out02.glb and /dev/null differ
diff --git a/examples/00-basic/09-results_over_space_subset.py b/examples/00-basic/09-results_over_space_subset.py
index 18b54dd3ea3..30b256f252a 100644
--- a/examples/00-basic/09-results_over_space_subset.py
+++ b/examples/00-basic/09-results_over_space_subset.py
@@ -141,9 +141,10 @@
 print(stress)
 
 for body_id in stress.get_mat_scoping().ids:
-    field = stress.get_field_by_mat_id(body_id)
-    if field.elementary_data_count > 0:
-        model.metadata.meshed_region.plot(field)
+    fields = stress.get_fields_by_mat_id(body_id)
+    for field in fields:
+        if field.elementary_data_count > 0:
+            model.metadata.meshed_region.plot(field)
 
 ###############################################################################
 # Create a custom spatial split
diff --git a/examples/06-distributed-post/00-distributed_total_disp.py b/examples/06-distributed-post/00-distributed_total_disp.py
index 761f13f7fac..5d753d65457 100644
--- a/examples/06-distributed-post/00-distributed_total_disp.py
+++ b/examples/06-distributed-post/00-distributed_total_disp.py
@@ -7,9 +7,44 @@
 To help understand this example the following diagram is provided. It shows
 the operator chain used to compute the final result.
 
-.. image:: 00-operator-dep.svg
-   :align: center
-   :width: 400
+.. graphviz::
+
+    digraph foo {
+        graph [pad="0", nodesep="0.3", ranksep="0.3"]
+        node [shape=box, style=filled, fillcolor="#ffcc00", margin="0"];
+        rankdir=LR;
+        splines=line;
+
+        disp01 [label="displacement"];
+        disp02 [label="displacement"];
+        norm01 [label="norm"];
+        norm02 [label="norm"];
+
+        subgraph cluster_1 {
+            ds01 [label="data_src", shape=box, style=filled, fillcolor=cadetblue2];
+
+            ds01 -> disp01 [style=dashed];
+            disp01 -> norm01;
+
+            label="Server 1";
+            style=filled;
+            fillcolor=lightgrey;
+        }
+
+        subgraph cluster_2 {
+            ds02 [label="data_src", shape=box, style=filled, fillcolor=cadetblue2];
+
+            ds02 -> disp02 [style=dashed];
+            disp02 -> norm02;
+
+            label="Server 2";
+            style=filled;
+            fillcolor=lightgrey;
+        }
+
+        norm01 -> "merge";
+        norm02 -> "merge";
+    }
 """
 
 ###############################################################################
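Note: the inline graph added above shows one norm per server feeding a single merge. A sketch of that chain with the public pydpf-core API, assuming two local servers and the distributed example files used elsewhere in this diff (pin numbering follows the diagram, not a verbatim copy of the example script):

    from ansys.dpf import core as dpf
    from ansys.dpf.core import examples, operators as ops

    files = examples.download_distributed_files()
    servers = [dpf.start_local_server(as_global=False) for _ in files]

    # One merge operator on the local server collects the per-server norms.
    merge = ops.utility.merge_fields_containers()
    for i, (file, server) in enumerate(zip(files, servers)):
        ds = dpf.DataSources(file, server=server)
        displacement = ops.result.displacement(data_sources=ds, server=server)
        norm = ops.math.norm_fc(displacement, server=server)
        merge.connect(i, norm)  # pins 0, 1, ... receive the containers to merge

    print(merge.outputs.merged_fields_container())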
diff --git a/examples/06-distributed-post/01-distributed_workflows_on_remote.py b/examples/06-distributed-post/01-distributed_workflows_on_remote.py
index e3feb129ed4..65c0182d80d 100644
--- a/examples/06-distributed-post/01-distributed_workflows_on_remote.py
+++ b/examples/06-distributed-post/01-distributed_workflows_on_remote.py
@@ -9,9 +9,39 @@
 sequences are directly created on different servers. These operators are then
 connected together without having to care that they are on remote processes.
 
-.. image:: 01-operator-dep.svg
-   :align: center
-   :width: 400
+.. graphviz::
+
+    digraph foo {
+        graph [pad="0", nodesep="0.3", ranksep="0.3"]
+        node [shape=box, style=filled, fillcolor="#ffcc00", margin="0"];
+        rankdir=LR;
+        splines=line;
+
+        subgraph cluster_1 {
+            ds01 [label="data_src", shape=box, style=filled, fillcolor=cadetblue2];
+
+            ds01 -> stress1 [style=dashed];
+
+            label="Server 1";
+            style=filled;
+            fillcolor=lightgrey;
+        }
+
+        subgraph cluster_2 {
+            ds02 [label="data_src", shape=box, style=filled, fillcolor=cadetblue2];
+
+            ds02 -> stress2 [style=dashed];
+            stress2 -> mul;
+
+            label="Server 2";
+            style=filled;
+            fillcolor=lightgrey;
+        }
+
+        stress1 -> "merge";
+        mul -> "merge";
+    }
+
 """
 
 ###############################################################################
 # Import dpf module and its examples files
diff --git a/examples/06-distributed-post/02-distributed-msup_expansion.py b/examples/06-distributed-post/02-distributed-msup_expansion.py
index 92188c1f1fe..1c0846c5088 100644
--- a/examples/06-distributed-post/02-distributed-msup_expansion.py
+++ b/examples/06-distributed-post/02-distributed-msup_expansion.py
@@ -11,9 +11,60 @@
 To help understand this example the following diagram is provided. It shows
 the operator chain used to compute the final result.
 
-.. image:: 02-operator-dep.svg
-   :align: center
-   :width: 800
+.. graphviz::
+
+    digraph foo {
+        graph [pad="0", nodesep="0.3", ranksep="0.3"]
+        node [shape=box, style=filled, fillcolor="#ffcc00", margin="0"];
+        rankdir=LR;
+        splines=line;
+
+        disp01 [label="displacement"];
+        disp02 [label="displacement"];
+        mesh01 [label="mesh"];
+        mesh02 [label="mesh"];
+
+        subgraph cluster_1 {
+            ds01 [label="data_src", shape=box, style=filled, fillcolor=cadetblue2];
+
+            disp01; mesh01;
+
+            ds01 -> disp01 [style=dashed];
+            ds01 -> mesh01 [style=dashed];
+
+            label="Server 1";
+            style=filled;
+            fillcolor=lightgrey;
+        }
+
+        subgraph cluster_2 {
+            ds02 [label="data_src", shape=box, style=filled, fillcolor=cadetblue2];
+
+            disp02; mesh02;
+
+            ds02 -> disp02 [style=dashed];
+            ds02 -> mesh02 [style=dashed];
+
+            label="Server 2";
+            style=filled;
+            fillcolor=lightgrey;
+        }
+
+        disp01 -> "merge_fields";
+        mesh01 -> "merged_mesh";
+        disp02 -> "merge_fields";
+        mesh02 -> "merged_mesh";
+
+        ds03 [label="data_src", shape=box, style=filled, fillcolor=cadetblue2];
+        ds03 -> "response" [style=dashed];
+
+        "merged_mesh" -> "response";
+        "response" -> "expansion";
+        "merge_fields" -> "expansion";
+        "expansion" -> "component";
+    }
+
 """
 
 ###############################################################################
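Note: the 01-distributed_workflows_on_remote hunk above states that operators on different servers can be connected "without having to care that they are on remote processes". A sketch of that idea under the same assumptions as the previous snippet, with scale_fc standing in for the "mul" node of the diagram:

    from ansys.dpf import core as dpf
    from ansys.dpf.core import examples, operators as ops

    files = examples.download_distributed_files()
    server1 = dpf.start_local_server(as_global=False)
    server2 = dpf.start_local_server(as_global=False)

    # Server 1: stress straight from the first file.
    ds1 = dpf.DataSources(files[0], server=server1)
    stress1 = ops.result.stress(data_sources=ds1, server=server1)

    # Server 2: stress from the second file, scaled by 2 (the "mul" node).
    ds2 = dpf.DataSources(files[1], server=server2)
    stress2 = ops.result.stress(data_sources=ds2, server=server2)
    mul = ops.math.scale_fc(stress2, 2.0, server=server2)

    # Connecting across servers looks exactly like a local connection.
    merge = ops.utility.merge_fields_containers()
    merge.connect(0, stress1)
    merge.connect(1, mul)
    print(merge.outputs.merged_fields_container())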
diff --git a/examples/06-distributed-post/03-distributed-msup_expansion_steps.py b/examples/06-distributed-post/03-distributed-msup_expansion_steps.py
index 99c0a6531f1..487dda9c822 100644
--- a/examples/06-distributed-post/03-distributed-msup_expansion_steps.py
+++ b/examples/06-distributed-post/03-distributed-msup_expansion_steps.py
@@ -11,9 +11,63 @@
 To help understand this example the following diagram is provided. It shows
 the operator chain used to compute the final result.
 
-.. image:: 03-operator-dep.svg
-   :align: center
-   :width: 800
+.. graphviz::
+
+    digraph foo {
+        graph [pad="0", nodesep="0.3", ranksep="0.3"]
+        node [shape=box, style=filled, fillcolor="#ffcc00", margin="0"];
+        rankdir=LR;
+        splines=line;
+
+        disp01 [label="displacement"];
+        disp02 [label="displacement"];
+        mesh01 [label="mesh"];
+        mesh02 [label="mesh"];
+
+        subgraph cluster_1 {
+            ds01 [label="data_src", shape=box, style=filled, fillcolor=cadetblue2];
+
+            disp01; mesh01;
+
+            ds01 -> disp01 [style=dashed];
+            ds01 -> mesh01 [style=dashed];
+
+            label="Server 1";
+            style=filled;
+            fillcolor=lightgrey;
+        }
+
+        subgraph cluster_2 {
+            ds02 [label="data_src", shape=box, style=filled, fillcolor=cadetblue2];
+
+            disp02; mesh02;
+
+            ds02 -> disp02 [style=dashed];
+            ds02 -> mesh02 [style=dashed];
+
+            label="Server 2";
+            style=filled;
+            fillcolor=lightgrey;
+        }
+
+        disp01 -> "merge_fields";
+        mesh01 -> "merge_mesh";
+        disp02 -> "merge_fields";
+        mesh02 -> "merge_mesh";
+
+        ds03 [label="data_src", shape=box, style=filled, fillcolor=cadetblue2];
+        ds03 -> "response2" [style=dashed];
+        ds04 [label="data_src", shape=box, style=filled, fillcolor=cadetblue2];
+        ds04 -> "response" [style=dashed];
+
+        "merge_mesh" -> "response";
+        "response" -> "merge_use_pass";
+        "response2" -> "merge_use_pass";
+        "merge_use_pass" -> "expansion";
+        "merge_fields" -> "expansion";
+        "expansion" -> "component";
+    }
+
 """
 
 ###############################################################################
diff --git a/examples/06-distributed-post/06-distributed_stress_averaging.py b/examples/06-distributed-post/06-distributed_stress_averaging.py
index 32fa934e43b..8a52763e7ee 100644
--- a/examples/06-distributed-post/06-distributed_stress_averaging.py
+++ b/examples/06-distributed-post/06-distributed_stress_averaging.py
@@ -28,7 +28,8 @@
 
 files = examples.download_distributed_files()
 
-remote_servers = [dpf.start_local_server(as_global=False) for file in files]
+config = dpf.ServerConfig(protocol=dpf.server.CommunicationProtocols.gRPC)
+remote_servers = [dpf.start_local_server(as_global=False, config=config) for file in files]
 
 ips = [remote_server.ip for remote_server in remote_servers]
 ports = [remote_server.port for remote_server in remote_servers]
@@ -129,8 +130,9 @@
 ds.set_domain_result_file_path(files[0], 0)
 ds.set_domain_result_file_path(files[1], 1)
 
-stress = dpf.Model(ds).results.stress()
-fc_single_process = ops.averaging.to_nodal_fc(stress).outputs.fields_container()
+model = dpf.Model(ds)
+stress = model.results.stress()
+fc_single_process = ops.averaging.to_nodal_fc(stress).eval()
 
 fc_single_process[0].plot()
 print(fc_single_process[0].min().data)
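Note: two things change in the stress-averaging hunk above. The remote servers are pinned to the gRPC protocol, and .outputs.fields_container() is shortened to .eval(). Both calls appear verbatim in the diff; a small sketch putting them together:

    from ansys.dpf import core as dpf

    # Explicit protocol selection, as in the hunk above.
    config = dpf.ServerConfig(protocol=dpf.server.CommunicationProtocols.gRPC)
    server = dpf.start_local_server(as_global=False, config=config)
    print(server.ip, server.port, server.version)

    # op.eval() runs an operator and returns its default output, so for an
    # operator whose default output is a fields container it is interchangeable
    # with op.outputs.fields_container().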
diff --git a/tests/conftest.py b/tests/conftest.py
index 382d66e93b4..e0e8df3d00f 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -360,7 +360,9 @@ def server_clayer(request):
 
 @pytest.fixture()
 def restore_awp_root():
-    awp_root_name = "AWP_ROOT" + core.misc.__ansys_version__
+    ver_to_check = core._version.server_to_ansys_version[str(core.SERVER.version)]
+    ver_to_check = ver_to_check[2:4] + ver_to_check[5:6]
+    awp_root_name = "AWP_ROOT" + ver_to_check
     awp_root_save = os.environ.get(awp_root_name, None)
     yield
     # restore awp_root
diff --git a/tests/test_launcher.py b/tests/test_launcher.py
index 72c6b5af11a..1be0d241e67 100644
--- a/tests/test_launcher.py
+++ b/tests/test_launcher.py
@@ -63,11 +63,14 @@ def reset_server():
         reason="Ans.Dpf.Grpc.bat and .sh need AWP_ROOT221 for 221 install",
     )
     def test_start_local_custom_ansys_path(self, server_config):
-        path = os.environ["AWP_ROOT" + str(core.misc.__ansys_version__)]
+        ver_to_check = core._version.server_to_ansys_version[str(core.SERVER.version)]
+        ver_to_check = ver_to_check[2:4] + ver_to_check[5:6]
+        awp_root_name = "AWP_ROOT" + ver_to_check
+        path = os.environ[awp_root_name]
         try:
-            os.unsetenv("AWP_ROOT" + str(core.misc.__ansys_version__))
+            os.unsetenv(awp_root_name)
         except:
-            del os.environ["AWP_ROOT" + str(core.misc.__ansys_version__)]
+            del os.environ[awp_root_name]
         try:
             server = core.start_local_server(
                 ansys_path=path,
@@ -80,11 +83,11 @@ def test_start_local_custom_ansys_path(self, server_config):
             p = psutil.Process(server.info["server_process_id"])
             assert path in p.cwd()
             os.environ[
-                "AWP_ROOT" + str(core.misc.__ansys_version__)
+                awp_root_name
             ] = path
         except Exception as e:
             os.environ[
-                "AWP_ROOT" + str(core.misc.__ansys_version__)
+                awp_root_name
             ] = path
             raise e
@@ -106,23 +109,26 @@ def test_start_local_no_ansys_path(self, server_config):
         reason="Ans.Dpf.Grpc.bat and .sh need AWP_ROOT221 for 221 install",
     )
     def test_start_local_ansys_path_environment_variable(self, server_config):
+        ver_to_check = core._version.server_to_ansys_version[str(core.SERVER.version)]
+        ver_to_check = ver_to_check[2:4] + ver_to_check[5:6]
+        awp_root_name = "AWP_ROOT" + ver_to_check
         awp_root = os.environ[
-            "AWP_ROOT" + str(core.misc.__ansys_version__)
+            awp_root_name
        ]
        try:
            os.environ["ANSYS_DPF_PATH"] = awp_root
            try:
-                os.unsetenv("AWP_ROOT" + str(core.misc.__ansys_version__))
+                os.unsetenv(awp_root_name)
            except:
                del os.environ[
-                    "AWP_ROOT" + str(core.misc.__ansys_version__)
+                    awp_root_name
                ]
            server = core.start_local_server(
                use_docker_by_default=False, config=server_config
            )
            assert isinstance(server.os, str)
            os.environ[
-                "AWP_ROOT" + str(core.misc.__ansys_version__)
+                awp_root_name
            ] = awp_root
            try:
                os.unsetenv("ANSYS_DPF_PATH")
@@ -131,7 +137,7 @@ def test_start_local_ansys_path_environment_variable(self, server_config):
         except Exception as e:
             os.environ[
-                "AWP_ROOT" + str(core.misc.__ansys_version__)
+                awp_root_name
             ] = awp_root
             try:
                 os.unsetenv("ANSYS_DPF_PATH")
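Note: the test hunks above repeat the same slicing to turn an Ansys release string into the AWP_ROOT suffix, presumably so the tests follow the version of the server actually running rather than the hard-coded __ansys_version__. A worked example of what ver_to_check[2:4] + ver_to_check[5:6] extracts (the dictionary entries below are illustrative; the real table lives in ansys.dpf.core._version):

    # Illustrative entries only; the real mapping relates DPF server versions
    # to Ansys release strings such as "2022R2".
    server_to_ansys_version = {"4.0": "2022R2", "5.0": "2023R1"}

    ansys_version = server_to_ansys_version["4.0"]   # "2022R2"
    short = ansys_version[2:4] + ansys_version[5:6]  # "22" + "2" -> "222"
    assert "AWP_ROOT" + short == "AWP_ROOT222"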
diff --git a/tests/test_service.py b/tests/test_service.py
index 4e6c93461b2..afe8d8783d5 100644
--- a/tests/test_service.py
+++ b/tests/test_service.py
@@ -299,8 +299,9 @@ def test_load_api_without_awp_root(restore_awp_root):
     legacy_conf = ServerConfig(protocol=CommunicationProtocols.gRPC, legacy=True)
     loc_serv = dpf.core.start_local_server(config=legacy_conf, as_global=False)
-
-    awp_root_name = "AWP_ROOT" + dpf.core.misc.__ansys_version__
+    ver_to_check = dpf.core._version.server_to_ansys_version[str(loc_serv.version)]
+    ver_to_check = ver_to_check[2:4] + ver_to_check[5:6]
+    awp_root_name = "AWP_ROOT" + ver_to_check
 
     # delete awp_root
     del os.environ[awp_root_name]
diff --git a/tests/testfiles/pythonPlugins/all_types/dpf_types_op.py b/tests/testfiles/pythonPlugins/all_types/dpf_types_op.py
index e6b4dd0132b..a790c07d1bc 100644
--- a/tests/testfiles/pythonPlugins/all_types/dpf_types_op.py
+++ b/tests/testfiles/pythonPlugins/all_types/dpf_types_op.py
@@ -1,6 +1,6 @@
 from ansys.dpf.core.custom_operator import CustomOperatorBase
 from ansys.dpf.core import field, scoping, fields_container, meshes_container, scopings_container,\
-    property_field, data_sources, types, workflow, data_tree, string_field
+    property_field, data_sources, types, workflow, data_tree
 
 
 class ForwardFieldOperator(CustomOperatorBase):
@@ -101,6 +101,7 @@ def name(self):
 
 class ForwardStringFieldOperator(CustomOperatorBase):
     def run(self):
+        from ansys.dpf.core import string_field
         f = self.get_input(0, string_field.StringField)
         f = self.get_input(0, types.string_field)
         self.set_output(0, f)
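Note: the last hunk defers the string_field import into run(), so the plugin module still imports on servers where StringField is unavailable. A minimal sketch of the CustomOperatorBase pattern this test plugin relies on; the operator name is made up, and set_succeeded and record_operator belong to the same custom-operator API as the hunk above:

    from ansys.dpf.core.custom_operator import CustomOperatorBase, record_operator
    from ansys.dpf.core import field


    class ForwardAnyFieldOperator(CustomOperatorBase):
        def run(self):
            # Deferred import, mirroring the hunk above: resolve optional
            # types only when the operator actually runs.
            from ansys.dpf.core import string_field  # noqa: F401

            data = self.get_input(0, field.Field)  # pin 0: input field
            self.set_output(0, data)               # pin 0: forwarded unchanged
            self.set_succeeded()

        @property
        def specification(self):
            return None

        @property
        def name(self):
            return "forward_any_field"


    def load_operators(*args):
        # Entry point called by the DPF server when loading the plugin.
        record_operator(ForwardAnyFieldOperator, *args)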